Example usage for java.nio.file Files newBufferedReader

List of usage examples for java.nio.file Files newBufferedReader

Introduction

On this page you can find example usage for java.nio.file Files newBufferedReader.

Prototype

public static BufferedReader newBufferedReader(Path path) throws IOException 

Source Link

Document

Opens a file for reading, returning a BufferedReader to read text from the file in an efficient manner.

Usage

From source file:org.mortbay.jetty.load.generator.starter.AbstractLoadGeneratorStarter.java

/**
 * Lazily resolves and caches the load-profile {@code Resource}.
 *
 * Resolution order: JSON profile path first, then XML, then Groovy — the first
 * branch that applies wins and its result is cached in the {@code resource} field.
 *
 * @return the resolved (and cached) resource
 * @throws Exception if reading or evaluating a profile file fails
 * @throws IllegalArgumentException if no profile path is configured
 */
public Resource getResource() throws Exception {
    // Return the cached value on subsequent calls.
    if (resource != null) {
        return resource;
    }

    if (starterArgs.getProfileJsonPath() != null) {
        Path profilePath = Paths.get(starterArgs.getProfileJsonPath());
        // JSON branch only fires if the file exists; otherwise fall through to XML.
        if (Files.exists(profilePath)) {
            return resource = evaluateJson(profilePath);
        }
    }
    if (starterArgs.getProfileXmlPath() != null) {
        Path profilePath = Paths.get(starterArgs.getProfileXmlPath());
        // NOTE(review): unlike the JSON branch, no Files.exists() check here — a
        // missing XML file fails with an exception instead of falling through;
        // confirm this asymmetry is intended.
        try (InputStream inputStream = Files.newInputStream(profilePath)) {
            return resource = (Resource) new XmlConfiguration(inputStream).configure();
        }
    }
    if (starterArgs.getProfileGroovyPath() != null) {
        Path profilePath = Paths.get(starterArgs.getProfileGroovyPath());

        // try-with-resources closes the reader after the script is evaluated.
        try (Reader reader = Files.newBufferedReader(profilePath)) {
            return resource = (Resource) evaluateScript(reader);
        }
    }

    throw new IllegalArgumentException("not resource profile file defined");
}

From source file:jvmoptions.OptionAnalyzer.java

/**
 * Reads the given HotSpot header file and flattens it into a single string for
 * later option parsing.
 *
 * RxJava pipeline: split the file into lines, skip everything up to the line
 * matching {@code BEGIN} plus two more lines, strip trailing backslashes
 * (macro line continuations) and inline C comments, drop whitespace-only lines,
 * join the remainder, then remove {@code EMBEDDED_ONLY(...)} wrappers.
 *
 * NOTE(review): the BufferedReader handed to StringObservable.from is never
 * explicitly closed here — confirm the observable closes it on completion.
 *
 * @param hpp path to the globals {@code .hpp} file
 * @return the preprocessed file contents as one string
 * @throws IOException if the file cannot be opened
 */
static String preprocess(Path hpp) throws IOException {
    Observable<String> globals_hpp = StringObservable.from(Files.newBufferedReader(hpp));
    Observable<String> o = StringObservable.split(globals_hpp, "\r?\n")
            .skipWhile(line -> BEGIN.matcher(line).matches() == false).skip(2)
            .map(line -> line.replaceAll("\\\\$", "")).map(line -> line.replaceAll("/\\*.*?\\*/", ""))
            .filter(line -> line.matches("^[ ]+$") == false);
    String all = StringObservable.join(o, "").toBlockingObservable().first();
    return all.replaceAll("EMBEDDED_ONLY\\(\\w*\\(.*?\\)\\)", "");
}

From source file:javalibs.CSVDataNormalizer.java

/**
 * Reads the CSV file at {@code this.csvPath}, capturing the header map and all
 * records for later use, then rebuilds the reverse header lookup.
 *
 * Any {@link IOException} is treated as fatal and routed to {@code log_.die(e)}.
 */
private void readCSV() {
    // try-with-resources guarantees the parser (and its underlying reader) is
    // closed even if getHeaderMap()/getRecords() throws — the original only
    // closed on the success path and leaked the reader on failure.
    try (CSVParser parser = new CSVParser(Files.newBufferedReader(Paths.get(this.csvPath)),
            CSVFormat.DEFAULT.withHeader().withIgnoreHeaderCase().withTrim())) {

        // Get all headers in the CSV file so they can be used later when writing the file
        this.headerMap = parser.getHeaderMap();

        // Add them to the records list for later use
        this.allRecords = parser.getRecords();

        reverseHeaderMap();
    } catch (IOException e) {
        log_.die(e);
    }
}

From source file:com.compomics.colims.distributed.playground.AnnotatedSpectraParser.java

/**
 * Parse the APL files for given aplKeys and put the peaks in the spectrumPeaks list.
 *
 * For each registered APL file path: verify the file exists, scan it line by
 * line for spectrum entries, parse each entry's header block, and — if the
 * header matches one of the identified {@code aplKeys} — collect its peak list
 * into {@code spectrumPeaks}.
 *
 * @throws FileNotFoundException if a registered APL file does not exist
 * @throws IOException if reading an APL file fails
 */
private void parseAplFile() throws IOException {
    for (Path aplFilePath : aplFilePaths.keySet()) {
        if (!Files.exists(aplFilePath)) {
            throw new FileNotFoundException(
                    "The apl spectrum file " + aplFilePath.toString() + " could not be found.");
        }
        try (BufferedReader bufferedReader = Files.newBufferedReader(aplFilePath)) {
            String line;
            Map<String, String> headers = new HashMap<>();

            while ((line = bufferedReader.readLine()) != null) {
                //look for a spectrum entry
                if (line.startsWith(APL_SPECTUM_START)) {
                    //go to the next line
                    line = bufferedReader.readLine();
                    //parse spectrum header part: header lines continue until the
                    //first line that starts with a digit (the first peak row)
                    //NOTE(review): if the file ends mid-entry, readLine() returns
                    //null and line.charAt(0) throws NPE — assumes well-formed files;
                    //confirm.
                    while (!Character.isDigit(line.charAt(0))) {
                        String[] split = line.split(APL_HEADER_DELIMITER);
                        headers.put(split[0], split[1]);
                        line = bufferedReader.readLine();
                    }
                    //" Precursor: 0 _multi_" is removed before looking up the key in the spectra map
                    String header = org.apache.commons.lang3.StringUtils
                            .substringBefore(headers.get(APL_HEADER), " Precursor");
                    //check if the spectrum was identified and therefore can be found in the spectra map
                    if (aplKeys.contains(header)) {
                        List<Peak> peakList = new ArrayList<>();
                        //peak rows are "m/z<tab>intensity" pairs until the end marker
                        while (!line.startsWith(APL_SPECTUM_END)) {
                            String[] splitLine = line.split(MaxQuantConstants.PARAM_TAB_DELIMITER.value());
                            Peak peak = new Peak(Double.parseDouble(splitLine[0]),
                                    Double.parseDouble(splitLine[1]));

                            peakList.add(peak);
                            line = bufferedReader.readLine();
                        }
                        spectrumPeaks.put(header, peakList);
                    }
                    //clear headers map so the next entry starts fresh
                    headers.clear();
                }
            }
        }
    }
}

From source file:org.opendatakit.briefcase.reused.UncheckedFiles.java

/**
 * Returns the first line of the file at {@code path} (without its line
 * terminator), or {@code null} if the file is empty.
 *
 * @param path file to read
 * @return the first line, or {@code null} for an empty file
 * @throws UncheckedIOException if the file cannot be opened or read
 */
public static String readFirstLine(Path path) {
    try (BufferedReader lineSource = Files.newBufferedReader(path)) {
        return lineSource.readLine();
    } catch (IOException cause) {
        // Wrap so callers are not forced to declare the checked IOException.
        throw new UncheckedIOException(cause);
    }
}

From source file:org.wildfly.swarm.proc.Monitor.java

/**
 * Opens the given CSV file and returns a commons-csv parser over it, treating
 * the first row as the header.
 *
 * Ownership note: the reader is deliberately NOT closed here — the caller must
 * close the returned CSVParser, which closes the underlying reader.
 * NOTE(review): if parse() itself throws, the reader leaks — confirm acceptable.
 *
 * @param file CSV file to open
 * @return an open parser positioned after the header row
 * @throws Exception if the file cannot be opened or the header cannot be parsed
 */
private CSVParser loadCSV(File file) throws Exception {
    Reader input = Files.newBufferedReader(file.toPath());
    return CSVFormat.DEFAULT.withHeader().parse(input);
}

From source file:com.collaborne.jsonschema.generator.driver.GeneratorDriver.java

/**
 * Create a {@link SchemaLoader} with the provided {@code rootUri} and {@code baseDirectory}.
 *
 * All schemas from {@code schemaFiles} are pre-loaded into the schema loader.
 *
 * @param rootUri/* www . j  av a2 s .c o m*/
 * @param baseDirectory
 * @param schemaFiles
 * @return
 * @throws IOException
 */
public SchemaLoader createSchemaLoader(URI rootUri, Path baseDirectory, List<Path> schemaFiles)
        throws IOException {
    URI baseDirectoryUri = baseDirectory.toAbsolutePath().normalize().toUri();

    // We're not adding a path redirection here, because that changes the path of the loaded schemas to the redirected location.
    // FIXME: This really looks like a bug in the SchemaLoader itself!
    URITranslatorConfiguration uriTranslatorConfiguration = URITranslatorConfiguration.newBuilder()
            .setNamespace(rootUri).freeze();

    LoadingConfigurationBuilder loadingConfigurationBuilder = LoadingConfiguration.newBuilder()
            .setURITranslatorConfiguration(uriTranslatorConfiguration);

    // ... instead, we use a custom downloader which executes the redirect
    Map<String, URIDownloader> downloaders = loadingConfigurationBuilder.freeze().getDownloaderMap();
    URIDownloader redirectingDownloader = new URIDownloader() {
        @Override
        public InputStream fetch(URI source) throws IOException {
            URI relativeSourceUri = rootUri.relativize(source);
            if (!relativeSourceUri.isAbsolute()) {
                // Apply the redirect
                source = baseDirectoryUri.resolve(relativeSourceUri);
            }

            URIDownloader wrappedDownloader = downloaders.get(source.getScheme());
            return wrappedDownloader.fetch(source);
        }
    };
    for (Map.Entry<String, URIDownloader> entry : downloaders.entrySet()) {
        loadingConfigurationBuilder.addScheme(entry.getKey(), redirectingDownloader);
    }

    JsonNodeReader reader = new JsonNodeReader(objectMapper);
    for (Path schemaFile : schemaFiles) {
        URI schemaFileUri = schemaFile.toAbsolutePath().normalize().toUri();
        URI relativeSchemaUri = baseDirectoryUri.relativize(schemaFileUri);
        URI schemaUri = rootUri.resolve(relativeSchemaUri);

        logger.info("{}: loading from {}", schemaUri, schemaFile);
        JsonNode schemaNode = reader.fromReader(Files.newBufferedReader(schemaFile));
        // FIXME: (upstream?): the preloaded map is accessed via the "real URI", so we need that one here as well
        //        This smells really wrong, after all we want all these to look like they came from rootUri()
        loadingConfigurationBuilder.preloadSchema(schemaFileUri.toASCIIString(), schemaNode);
    }

    return new SchemaLoader(loadingConfigurationBuilder.freeze());
}

From source file:org.roda.core.plugins.plugins.characterization.MediaInfoPlugin.java

/**
 * Extracts the text of the first {@code <Complete_name>} element from a
 * MediaInfo XML result file.
 *
 * @param nodeResult path to the XML file to parse
 * @return text content of the first {@code Complete_name} element
 * @throws ParserConfigurationException if no XML parser can be configured
 * @throws IOException if the file cannot be read
 * @throws SAXException if the XML is malformed
 */
private String extractFileName(Path nodeResult) throws ParserConfigurationException, IOException, SAXException {
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    Document doc;
    // try-with-resources: the original never closed this reader, leaking a file
    // handle on every call.
    try (java.io.BufferedReader characterStream = Files.newBufferedReader(nodeResult)) {
        InputSource is = new InputSource();
        is.setCharacterStream(characterStream);
        doc = db.parse(is);
    }
    NodeList nodes = doc.getElementsByTagName("Complete_name");
    // NOTE(review): nodes.item(0) is null (NPE) if the element is absent —
    // assumes MediaInfo output always contains Complete_name; confirm.
    return nodes.item(0).getTextContent();
}

From source file:sadl.input.TimedInput.java

/**
 * Parses timed sequences from a file in a custom format:
 * /*from  w w  w.j  a  v a  2 s  . c o m*/
 * @param in
 *            A {@link Path} that contains timed sequences in the appropriate alternative format
 * @param lineOffset
 *            The number of lines that will be skipped at the beginning of the file because they contain a header with meta data
 * @param seqPrefix
 *            A regular expression that matches the prefix of each sequence; after removing the prefix the line must begin with the first symbol
 *            {@link String}
 * @param seqPostfix
 *            A regular expression that matches the postfix of each sequence (until the regular expression in {@code classSep} appears); after removing the
 *            postfix the line must end with the last time delay.
 * @param pairSep
 *            A regular expression that matches the separator between two value pairs in a sequence; must not be a substring of {@code valueSep}
 * @param valueSep
 *            A regular expression that matches the separator between two values of a pair in a sequence
 * @param classSep
 *            A regular expression that matches the separator between the sequence and the optional class label of a sequence; must not be a substring of
 *            {@code pairSep}
 * @return A {@link TimedInput} that represents the timed sequences parsed
 * @throws IOException
 */
public static TimedInput parseCustom(Path in, int lineOffset, String seqPrefix, String seqPostfix,
        String pairSep, String valueSep, String classSep) throws IOException {
    if (Files.notExists(in)) {
        logger.warn("File {} was not found.", in);
        throw new FileNotFoundException("input file on path " + in.toAbsolutePath() + " was not found");
    }
    try (BufferedReader br = Files.newBufferedReader(in)) {
        return parseCustom(br, lineOffset, seqPrefix, seqPostfix, pairSep, valueSep, classSep);
    }
}

From source file:org.codice.ddf.admin.core.impl.SystemPropertiesAdmin.java

/**
 * Rewrites the user-attributes JSON file after a system hostname change.
 *
 * Steps: read the file as a JSON map, merge in guest-claims profile attributes,
 * re-key the old hostname's entry to the current internal host (read from
 * system.properties), replace "localhost" occurrences in all values, then write
 * the map back out.
 *
 * On read failure the method logs and returns without touching the file; write
 * failures are logged only.
 *
 * @param userAttributesFile the users.attributes file to update in place
 */
private void writeOutUsersDotAttributesFile(File userAttributesFile) {
    Map<String, Object> json = null;
    try (BufferedReader br = Files.newBufferedReader(Paths.get(userAttributesFile.toURI()))) {
        json = GSON.fromJson(br, MAP_STRING_TO_OBJECT_TYPE);
    } catch (IOException e) {
        LOGGER.warn("Unable to read system user attribute file for hostname update.", e);
        return;
    }

    addGuestClaimsProfileAttributes(json);

    // Re-key the old hostname's attributes under the current internal host name.
    if (json.containsKey(oldHostName)) {
        Properties systemDotProperties = null;
        try {
            systemDotProperties = new Properties(systemPropertiesFile);
            json.put(systemDotProperties.get(SystemBaseUrl.INTERNAL_HOST), json.remove(oldHostName));
        } catch (IOException e) {
            LOGGER.warn("Exception while reading the system.properties file.", e);
        }
    }

    try {
        // NOTE(review): putting into json while iterating its entrySet relies on
        // put() of an existing key not being a structural modification — confirm
        // this holds for the concrete Map type GSON returns.
        for (Map.Entry<String, Object> entry : json.entrySet()) {
            json.put(entry.getKey(), replaceLocalhost(entry.getValue()));
        }
        FileUtils.writeStringToFile(userAttributesFile, GSON.toJson(json), Charset.defaultCharset());
    } catch (IOException e) {
        LOGGER.warn("Unable to write user attribute file for system update.", e);
    }
}