Example usage for java.nio.file Files newBufferedReader

List of usage examples for java.nio.file Files newBufferedReader

Introduction

On this page you can find example usage of java.nio.file Files.newBufferedReader.

Prototype

public static BufferedReader newBufferedReader(Path path, Charset cs) throws IOException 

Source Link

Document

Opens a file for reading, returning a BufferedReader that may be used to read text from the file in an efficient manner.

Usage

From source file:org.jboss.as.test.integration.logging.profiles.NonExistingProfileTestCase.java

@Test
public void defaultLoggingTest() throws IOException {
    final String msg = "defaultLoggingTest: This is a test message";
    final int statusCode = getResponse(msg);
    assertTrue("Invalid response statusCode: " + statusCode, statusCode == HttpStatus.SC_OK);
    // Scan the log file for the message we just caused to be logged.
    boolean logFound = false;
    try (final BufferedReader br = Files.newBufferedReader(loggingTestLog, StandardCharsets.UTF_8)) {
        String line;
        while ((line = br.readLine()) != null) {
            if (line.contains(msg)) {
                logFound = true;
                break;
            }
        }
    }
    // Use the statically imported assertTrue consistently (the original mixed it
    // with Assert.assertTrue) and give the failure a diagnostic message.
    assertTrue("Expected message not found in log: " + msg, logFound);
}

From source file:com.ignorelist.kassandra.steam.scraper.Configuration.java

/**
 * Loads a {@link Configuration} from a UTF-8 properties file.
 *
 * @param propertiesFile path of the properties file to read
 * @return the configuration built from the file's properties
 * @throws IOException if the file cannot be opened or read
 */
public static Configuration fromPropertiesFile(Path propertiesFile) throws IOException {
    // try-with-resources replaces the manual try/finally + IOUtils.closeQuietly:
    // the reader is always closed and a close() failure is propagated, not hidden.
    try (Reader propertiesReader = Files.newBufferedReader(propertiesFile, Charsets.UTF_8)) {
        Properties properties = new Properties();
        properties.load(propertiesReader);
        return fromProperties(properties);
    }
}

From source file:net.sourceforge.pmd.lang.java.rule.errorprone.AvoidDuplicateLiteralsRule.java

// Opens the configured exception file as UTF-8, wrapped so callers can track line numbers.
private LineNumberReader getLineReader() throws IOException {
    final Path exceptionFile = getProperty(EXCEPTION_FILE_DESCRIPTOR).toPath();
    final BufferedReader reader = Files.newBufferedReader(exceptionFile, StandardCharsets.UTF_8);
    return new LineNumberReader(reader);
}

From source file:nextflow.fs.dx.DxFileSystemProvider.java

/**
 * Find out the default DnaNexus project id in the specified configuration file
 *
 * @return The string value/* w  w  w . j a  v a  2 s. c o m*/
 */
/**
 * Find out the default DnaNexus project id in the specified configuration file
 *
 * @param config the JSON configuration file to read
 * @return the value of the {@code DX_PROJECT_CONTEXT_ID} entry
 * @throws IllegalStateException if the file is missing or cannot be read
 */
static String getContextIdByConfig(File config) {

    StringBuilder buffer = new StringBuilder();
    // try-with-resources: the original leaked the reader on every call path.
    try (BufferedReader reader = Files.newBufferedReader(config.toPath(), Charset.defaultCharset())) {
        String line;
        while ((line = reader.readLine()) != null) {
            buffer.append(line).append('\n');
        }

        JsonNode object = DxJson.parseJson(buffer.toString());
        return object.get("DX_PROJECT_CONTEXT_ID").textValue();
    } catch (FileNotFoundException | java.nio.file.NoSuchFileException e) {
        // Files.newBufferedReader reports a missing file with NoSuchFileException,
        // not FileNotFoundException -- the original "file missing" branch was
        // unreachable and missing files fell through to the generic message.
        throw new IllegalStateException(String.format(
                "Unable to load DnaNexus configuration file: %s -- cannot configure file system", config), e);
    } catch (IOException e) {
        throw new IllegalStateException("Unable to configure DnaNexus file system", e);
    }
}

From source file:org.apache.kylin.monitor.QueryParser.java

/**
 * Parses a Kylin query log file and appends one CSV record per query to an
 * HDFS result file.
 *
 * @param filePath local path of the query log to parse
 * @param dPath    HDFS path of the CSV file the records are appended to
 * @throws ParseException if a query timestamp does not match the expected format
 * @throws IOException    if the HDFS writer or filesystem cannot be closed
 */
public void parseQueryLog(String filePath, String dPath) throws ParseException, IOException {

    logger.info("Start parsing file " + filePath + " !");

    // Writer config init: append '|'-separated, unquoted CSV records to HDFS.
    FileSystem fs = this.getHdfsFileSystem();
    org.apache.hadoop.fs.Path resultStorePath = new org.apache.hadoop.fs.Path(dPath);
    OutputStreamWriter writer = new OutputStreamWriter(fs.append(resultStorePath));
    CSVWriter cwriter = new CSVWriter(writer, '|', CSVWriter.NO_QUOTE_CHARACTER);

    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS");
    // Matches the first line of a query log entry.
    Pattern p_query_start = Pattern.compile("^\\[.*\\]:\\[(.*),.*\\]\\[.*\\]\\[.*QueryService.logQuery.*\\].*");
    // Matches the last line of a query log entry.
    Pattern p_query_end = Pattern.compile("^Message:(.*)$");
    // Captures the individual fields of a complete multi-line entry.
    Pattern p_query_body = Pattern.compile(
            "^\\[.*\\]:\\[((\\d{4}-\\d{2}-\\d{2}).*)\\]\\[.*\\]\\[.*\\].*\n^=+\\[QUERY\\]=+\n^SQL:(.*)\n^User:(.*)\n^Success:(.*)\n^Duration:(.*)\n^Project:(.*)\n^(Realization Names|Cube Names): \\[(.*)\\]\n^Cuboid Ids: \\[(.*)\\]\n^Total scan count:(.*)\n^Result row count:(.*)\n^Accept Partial:(.*)\n(^Is Partial Result:(.*)\n)?^Hit Cache:(.*)\n^Message:(.*)",
            Pattern.MULTILINE);
    Matcher m_query_start = p_query_start.matcher("");
    Matcher m_query_end = p_query_end.matcher("");
    Matcher m_query_body = p_query_body.matcher("");

    boolean query_start = false;
    StringBuffer query_body = new StringBuffer("");
    Path path = Paths.get(filePath);
    // try-with-resources: the original never closed the reader (resource leak).
    try (BufferedReader reader = Files.newBufferedReader(path, ENCODING)) {
        String line = null;
        while ((line = reader.readLine()) != null) {
            m_query_start.reset(line); //reset the input
            m_query_end.reset(line);

            // Set start flag, clear the accumulated entry text.
            if (m_query_start.find()) {
                query_start = true;
                query_body = new StringBuffer("");
            }
            if (query_start) {
                query_body.append(line + "\n");
            }
            if (m_query_end.find()) {
                query_start = false;
                m_query_body.reset(query_body);
                logger.info("parsing query...");
                logger.info(query_body);
                // Skip group(8) and group(14): they are alternation/optional wrappers,
                // not data fields.
                if (m_query_body.find()) {
                    ArrayList<String> groups = new ArrayList<String>();
                    int grp_count = m_query_body.groupCount();
                    for (int i = 1; i <= grp_count; i++) {
                        if (i != 8 && i != 14) {
                            String grp_item = m_query_body.group(i);
                            grp_item = grp_item == null ? "" : grp_item.trim();
                            groups.add(grp_item);
                        }
                    }

                    // Derive the query start time: log timestamp minus the duration (seconds).
                    long start_time = format.parse(groups.get(0)).getTime()
                            - (int) (Double.parseDouble(groups.get(5)) * 1000);
                    groups.set(0, format.format(new Date(start_time)));
                    groups.add(DEPLOY_ENV);
                    String[] recordArray = groups.toArray(new String[groups.size()]);
                    // Write to hdfs.
                    cwriter.writeNext(recordArray);

                }

            }

        }
    } catch (IOException ex) {
        logger.info("Failed to write to hdfs:", ex);
    } finally {
        if (writer != null) {
            writer.close();
        }
        if (cwriter != null) {
            cwriter.close();
        }
        if (fs != null) {
            fs.close();
        }
    }

    logger.info("Finish parsing file " + filePath + " !");

}

From source file:org.apache.tika.cli.TikaCLIBatchIntegrationTest.java

@Test
public void testDigester() throws Exception {
    // Run the batch CLI over the test input with recursive-JSON plain-text
    // output and a SHA-512 digest, then verify the digest metadata appears in
    // the extracted JSON. (A stale commented-out MD5 variant of this test was
    // removed -- dead code belongs in version control history, not the source.)
    String[] params = { "-i", testInputDirForCommandLine, "-o", tempOutputDirForCommandLine, "-numConsumers",
            "10", "-J", //recursive Json
            "-t", //plain text in content
            "-digest", "sha512" };
    TikaCLI.main(params);
    Path jsonFile = tempOutputDir.resolve("test_recursive_embedded.docx.json");
    try (Reader reader = Files.newBufferedReader(jsonFile, UTF_8)) {

        List<Metadata> metadataList = JsonMetadataList.fromJson(reader);
        assertEquals(12, metadataList.size());
        assertNotNull(metadataList.get(0).get("X-TIKA:digest:SHA512"));
        assertTrue(metadataList.get(0).get("X-TIKA:digest:SHA512").startsWith("ee46d973ee1852c01858"));
    }
}

From source file:popgenutils.dfcp.PrepareVCF4DFCP.java

/**
 * //w ww  .  j  ava  2  s  . c  o  m
 */
/**
 * Filters a VCF file down to the sample columns whose super-population is in
 * {@code popstokeep}, writing the result to {@code output_dir}.
 */
private void filterpop() {
    Set<Integer> indices_to_keep = new HashSet<Integer>();
    Map<String, String> sample_to_pop = new HashMap<String, String>();
    Map<String, String> sample_to_superpop = new HashMap<String, String>();
    Set<String> pops_to_keep = new HashSet<String>();

    // The first 9 VCF columns (CHROM..FORMAT) are always kept.
    for (int i = 0; i < 9; i++) {
        indices_to_keep.add(i);
    }

    String[] popsparts = popstokeep.split(",");
    for (String pop : popsparts) {
        pops_to_keep.add(pop);
    }

    // Load the sample -> population / super-population mapping (tab-separated).
    try (BufferedReader in = Files.newBufferedReader(Paths.get(popmappingfile), Charset.forName("UTF-8"))) {
        String line = null;
        while ((line = in.readLine()) != null) {
            String[] parts = line.split("\t");
            sample_to_pop.put(parts[0], parts[1]);
            sample_to_superpop.put(parts[0], parts[2]);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }

    StringBuilder header = new StringBuilder();
    try (BufferedReader in = Files.newBufferedReader(Paths.get(filename), Charset.forName("UTF-8"))) {
        BufferedWriter out = null;
        // The writer is opened lazily at the #CHROM line; the inner try/finally
        // guarantees it is closed even if an exception is thrown mid-loop
        // (the original leaked it on any error after it was opened).
        try {
            String line = null;
            while ((line = in.readLine()) != null) {

                if (line.startsWith("#CHROM")) {
                    // Samples begin at column 9.
                    out = Files.newBufferedWriter(
                            Paths.get(output_dir + "/" + "popfilter_" + Paths.get(filename).getFileName()),
                            Charset.forName("UTF-8"));
                    out.write(header.toString());
                    String[] parts = line.split("\t");
                    for (int i = 9; i < parts.length; i++) {
                        if (pops_to_keep.contains(sample_to_superpop.get(parts[i])))
                            indices_to_keep.add(i);
                    }
                    out.write(parts[0]);
                    for (int i = 1; i < parts.length; i++) {
                        if (indices_to_keep.contains(i))
                            out.write("\t" + parts[i]);
                    }
                    out.write(System.getProperty("line.separator"));
                } else if (line.startsWith("#")) {
                    // Meta-information lines are buffered and emitted once the
                    // output file is opened at the #CHROM line.
                    header.append(line + System.getProperty("line.separator"));
                } else {
                    // Data line: FORMAT is at column 8, samples follow.
                    String[] parts = line.split("\t");
                    out.write(parts[0]);
                    for (int i = 1; i < parts.length; i++) {
                        if (indices_to_keep.contains(i))
                            out.write("\t" + parts[i]);
                    }
                    out.write(System.getProperty("line.separator"));

                }
            }
        } finally {
            if (out != null) {
                out.close();
            }
        }
    } catch (IOException e) {
        System.err.println("could not read from file " + filename);
        e.printStackTrace();
    }
}

From source file:org.mda.bcb.tcgagsdata.create.Metadata.java

/**
 * Reads a tab-separated disease-sample file and returns the sample identifiers.
 *
 * @param theFile        file whose rows hold tab-separated sample barcodes from column 1 on
 * @param theBarcodeFlag when true, return full barcodes; otherwise trim each to a patient id
 * @return sorted, de-duplicated identifiers
 * @throws IOException if the file cannot be read
 */
public TreeSet<String> getDiseaseSampleData(File theFile, boolean theBarcodeFlag) throws IOException {
    TreeSet<String> results = new TreeSet<>();
    // Charset.forName is the canonical lookup: availableCharsets().get(name)
    // yields null (then an NPE in newBufferedReader) if the name is unknown,
    // instead of a clear UnsupportedCharsetException.
    try (BufferedReader br = Files.newBufferedReader(Paths.get(theFile.getAbsolutePath()),
            Charset.forName("ISO-8859-1"))) {
        for (String line = br.readLine(); null != line; line = br.readLine()) {
            String[] splitted = line.split("\t", -1);
            // Column 0 is a row label; samples start at column 1.
            for (int index = 1; index < splitted.length; index++) {
                if (theBarcodeFlag) {
                    results.add(splitted[index]);
                } else {
                    results.add(trimToPatientId(splitted[index]));
                }
            }
        }
    }
    return results;
}

From source file:de.bps.onyx.plugin.OnyxModule.java

/**
 * Evaluates whether the given archive is an Onyx/QTI 2.1 test by scanning its
 * imsmanifest.xml for QTI v2.1 schema identifiers.
 */
public static ResourceEvaluation isOnyxTest(File file, String filename) {
    ResourceEvaluation eval = new ResourceEvaluation();
    try {
        ImsManifestFileFilter visitor = new ImsManifestFileFilter();
        Path fPath = PathUtils.visit(file, filename, visitor);
        if (visitor.isValid()) {
            Path qtiPath = fPath.resolve("imsmanifest.xml");
            // try-with-resources replaces the manual closeQuietly in finally.
            // readLine() == null is the correct end-of-stream test: ready() only
            // reports whether a read would block, so the original loop could
            // stop before EOF (and NPE'd if ready() was true at end of stream).
            try (BufferedReader reader = Files.newBufferedReader(qtiPath, StandardCharsets.UTF_8)) {
                String l;
                while ((l = reader.readLine()) != null) {
                    if (l.contains("imsqti_xmlv2p1") || l.contains("imsqti_test_xmlv2p1")
                            || l.contains("imsqti_assessment_xmlv2p1")) {
                        eval.setValid(true);
                        break;
                    }
                }
            }
        } else {
            eval.setValid(false);
        }
    } catch (NoSuchFileException nsfe) {
        // No manifest -> not an Onyx test.
        eval.setValid(false);
    } catch (IOException | IllegalArgumentException e) {
        log.error("", e);
        eval.setValid(false);
    }
    return eval;
}

From source file:org.jboss.as.test.integration.logging.operations.CustomFormattersTestCase.java

@Test
public void testUsage() throws Exception {
    // End-to-end check of a custom formatter: create it, attach it to a file
    // handler on the root logger, log through it, verify the XML output on
    // disk, then tear everything down and confirm removal.

    // Create the custom formatter
    CompositeOperationBuilder builder = CompositeOperationBuilder.create();
    ModelNode op = Operations.createAddOperation(CUSTOM_FORMATTER_ADDRESS);
    op.get("class").set("java.util.logging.XMLFormatter");
    // the module doesn't really matter since it's a JDK, so we'll just use the jboss-logmanager.
    op.get("module").set("org.jboss.logmanager");
    builder.addStep(op);

    // Create the handler
    op = Operations.createAddOperation(HANDLER_ADDRESS);
    final ModelNode file = op.get("file");
    file.get("relative-to").set("jboss.server.log.dir");
    file.get("path").set(FILE_NAME);
    // append=false so the file holds only this test's records; autoflush so
    // the records are on disk before we read the file back.
    op.get("append").set(false);
    op.get("autoflush").set(true);
    op.get("named-formatter").set(CUSTOM_FORMATTER_NAME);
    builder.addStep(op);

    // Add the handler to the root logger
    op = Operations.createOperation("add-handler", ROOT_LOGGER_ADDRESS);
    op.get(ModelDescriptionConstants.NAME).set(HANDLER_NAME);
    builder.addStep(op);

    // All three management steps execute as one composite operation.
    executeOperation(builder.build());

    // Get the log file
    op = Operations.createOperation("resolve-path", HANDLER_ADDRESS);
    ModelNode result = executeOperation(op);
    final Path logFile = Paths.get(Operations.readResult(result).asString());

    // The file should exist
    Assert.assertTrue("The log file was not created.", Files.exists(logFile));

    // Log 5 records
    doLog("Test message: ", 5);

    // Read the log file
    try (BufferedReader reader = Files.newBufferedReader(logFile, StandardCharsets.UTF_8)) {
        // Matches one XMLFormatter <message> element per line; group(2) is the text.
        final Pattern pattern = Pattern.compile("^(<message>)+(Test message: \\d)+(</message>)$");
        final List<String> messages = new ArrayList<>(5);
        String line;
        while ((line = reader.readLine()) != null) {
            final String trimmedLine = line.trim();
            final Matcher m = pattern.matcher(trimmedLine);
            // Very simple xml parsing
            if (m.matches()) {
                messages.add(m.group(2));
            }
        }

        // Should be 5 messages
        Assert.assertEquals(5, messages.size());
        // Check each message, in order: "Test message: 0" .. "Test message: 4"
        int count = 0;
        for (String msg : messages) {
            Assert.assertEquals("Test message: " + count++, msg);
        }
    }

    // Tear-down mirrors set-up: detach the handler, then remove formatter and handler.
    builder = CompositeOperationBuilder.create();
    // Remove the handler from the root-logger
    op = Operations.createOperation("remove-handler", ROOT_LOGGER_ADDRESS);
    op.get(ModelDescriptionConstants.NAME).set(HANDLER_NAME);
    builder.addStep(op);

    // Remove the custom formatter
    op = Operations.createRemoveOperation(CUSTOM_FORMATTER_ADDRESS);
    builder.addStep(op);

    // Remove the handler
    op = Operations.createRemoveOperation(HANDLER_ADDRESS);
    builder.addStep(op);

    executeOperation(builder.build());

    // So we don't pollute other, verify the formatter and handler have been removed
    op = Operations.createReadAttributeOperation(ROOT_LOGGER_ADDRESS, "handlers");
    result = executeOperation(op);
    // Should be a list type
    final List<ModelNode> handlers = Operations.readResult(result).asList();
    for (ModelNode handler : handlers) {
        Assert.assertNotEquals(CUSTOM_FORMATTER_NAME, handler.asString());
    }
    verifyRemoved(CUSTOM_FORMATTER_ADDRESS);
    verifyRemoved(HANDLER_ADDRESS);

    // Delete the log file
    Files.delete(logFile);
    // Ensure it's been deleted
    Assert.assertFalse(Files.exists(logFile));
}