Example usage for java.nio.file Files newBufferedReader

List of usage examples for java.nio.file Files newBufferedReader

Introduction

On this page you can find example usages of java.nio.file.Files.newBufferedReader.

Prototype

public static BufferedReader newBufferedReader(Path path, Charset cs) throws IOException 

Source Link

Document

Opens a file for reading, returning a BufferedReader that may be used to read text from the file in an efficient manner.

Usage

From source file:edu.pitt.dbmi.ccd.db.service.UserAccountServiceTest.java

@Ignore
@Test
public void testMigration() throws ParseException {
    System.out.println("testMigration");

    // Timestamps in the data file are formatted as "2016-10-28 13:45:00".
    DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    Path dataFile = Paths.get("data", "data.txt");
    try (BufferedReader reader = Files.newBufferedReader(dataFile, Charset.defaultCharset())) {
        // Skip the header row.
        reader.readLine();
        String line;
        while ((line = reader.readLine()) != null) {
            // Each record is a semicolon-separated row:
            // createdDate;lastLoginDate;password;username;email;firstName;lastName;workspace
            String[] fields = line.trim().split(";");
            Date createdDate = dateFormat.parse(fields[0].trim());
            Date lastLoginDate = dateFormat.parse(fields[1].trim());
            String password = fields[2].trim();
            String username = fields[3].trim();
            String email = fields[4].trim();
            String firstName = fields[5].trim();
            String lastName = fields[6].trim();
            String workspace = fields[7].trim();

            Person person = new Person(firstName, lastName, email, workspace);
            UserAccount userAccount = new UserAccount(person, username, password, true,
                    "61b2b10c-9d3a-11e6-8c7a-38c9860967a0", createdDate, lastLoginDate);
            userAccountService.save(userAccount);
        }
    } catch (IOException exception) {
        exception.printStackTrace(System.err);
    }
}

From source file:org.ng200.openolympus.FileAccess.java

/**
 * Opens the given file for reading text, decoding bytes with the given charset.
 * Thin delegating wrapper around {@link Files#newBufferedReader(Path, Charset)}.
 *
 * @param path    the file to open
 * @param charset the charset used to decode the file's bytes
 * @return a buffered reader over the file's contents
 * @throws IOException if the file cannot be opened
 */
public static BufferedReader newBufferedReader(final Path path, final Charset charset) throws IOException {
    final BufferedReader reader = Files.newBufferedReader(path, charset);
    return reader;
}

From source file:org.jboss.as.test.manualmode.logging.Log4jAppenderTestCase.java

/**
 * Triggers a request that should produce the given log message, then scans the
 * log file and asserts that the message's presence matches {@code expected}.
 *
 * @param msg      text to search for in each log line
 * @param expected whether the message is expected to appear in the log
 */
private void searchLog(final String msg, final boolean expected) throws Exception {
    final int statusCode = getResponse(msg);
    Assert.assertTrue("Invalid response statusCode: " + statusCode, statusCode == HttpStatus.SC_OK);
    try (final BufferedReader reader = Files.newBufferedReader(logFile, StandardCharsets.UTF_8)) {
        boolean logFound = false;
        for (String line = reader.readLine(); line != null; line = reader.readLine()) {
            if (line.contains(msg)) {
                logFound = true;
                break;
            }
        }
        Assert.assertTrue(logFound == expected);
    }
}

From source file:eu.crowdrec.flume.plugins.source.DirectIdomaarSource.java

/**
 * Builds a reader over the configured input source: a URL if one is set,
 * otherwise a local file that is either plain text or gzip-compressed.
 *
 * @return a buffered reader over the input data
 * @throws RuntimeException if an I/O error occurs or the format is unrecognized
 */
private BufferedReader createReader() {
    try {
        if (url != null) {
            return createReaderFromUrl();
        }
        final File inputFile = new File(fileName);
        switch (format) {
        case "plain":
            return Files.newBufferedReader(inputFile.toPath(), charset);
        case "gzip":
            // Decompress on the fly; gzip payload is decoded as UTF-8 text.
            final InputStream gzipStream = new GZIPInputStream(new FileInputStream(inputFile));
            return new BufferedReader(new InputStreamReader(gzipStream, "UTF-8"));
        default:
            break;
        }
    } catch (IOException exception) {
        throw new RuntimeException(exception);
    }
    throw new RuntimeException(
            "Unrecognized settings in DirectIdomaarSource: cannot figure out input source location.");
}

From source file:generic.GenericModelBuilder.java

/**
 * Reads the delimiter-separated data source configured in {@code spec},
 * parsing each row through its column specs.
 *
 * Rows that fail numeric parsing get {@code null} in the offending cell; all
 * such failures are collected and shown in a single error dialog afterwards.
 *
 * @return the parsed rows paired with the header column names, or an empty
 *         list and {@code null} columns if the file could not be read
 */
private Pair<List<GenericRow>, String[]> readData() {
    final List<ColumnSpec> cols = spec.getColumns();
    final StringBuilder report = new StringBuilder();
    try (BufferedReader reader = Files.newBufferedReader(new File(spec.getDataSourcePath()).toPath(),
            Charset.forName("UTF-8"))) {
        // First line is the header; it defines the column names.
        final String[] columns = reader.readLine().split(spec.getDelimiter());
        final List<GenericRow> rows = new ArrayList<>();
        String line;
        int rowIndex = 0;
        while ((line = reader.readLine()) != null) {
            final String[] vals = line.split(spec.getDelimiter());
            final Object[] data = new Object[spec.getColumns().size()];
            for (int i = 0; i < cols.size(); ++i) {
                try {
                    data[i] = cols.get(i).parse(vals);
                } catch (NumberFormatException e) {
                    // Keep loading; record the failure and leave the cell null.
                    log.error("can't parse: " + rowIndex + " col " + cols.get(i).col, e);
                    report.append(rowIndex).append("/").append(cols.get(i).col).append(": parsing error: ")
                            .append(e.getMessage()).append('\n');
                    data[i] = null;
                }
            }
            rows.add(new GenericRow(data));
            ++rowIndex;
        }
        if (report.length() > 0) {
            ErrorDialog.openError(null, "Parsing Errors", "Loading Error",
                    new Status(IStatus.ERROR, "GenericModelBuilder", report.toString()));
        }
        return Pair.make(rows, columns);
    } catch (IOException e) {
        ErrorDialog.openError(null, "Error during loading", "Loading Error",
                new Status(IStatus.ERROR, "GenericModelBuilder", e.getMessage(), e));
        log.error("can't parse", e);
    }
    return Pair.make(Collections.<GenericRow>emptyList(), null);
}

From source file:org.omegat.util.FileUtil.java

/**
 * Detects the first line terminator appearing in the given file.
 *
 * Returns the first CR or LF character found, combined with the immediately
 * following character if that is also a CR or LF (so "\r\n", "\n\r", and even
 * doubled terminators like "\n\n" are returned as two characters).
 *
 * @param file    file to inspect
 * @param charset charset used to decode the file
 * @return the terminator string, or {@code null} if the file contains none
 * @throws IOException if the file cannot be read
 */
public static String getEOL(File file, Charset charset) throws IOException {
    String eol = null;
    try (BufferedReader in = Files.newBufferedReader(file.toPath(), charset)) {
        int ch;
        while ((ch = in.read()) >= 0) {
            if (ch != '\n' && ch != '\r') {
                continue;
            }
            eol = Character.toString((char) ch);
            final int next = in.read();
            if (next == '\n' || next == '\r') {
                eol += Character.toString((char) next);
            }
            break;
        }
    }
    return eol;
}

From source file:com.vaushell.superpipes.tools.scribe.OAuthClient.java

/**
 * Loads a persisted OAuth token from the given file.
 *
 * File layout (one value per line): token, secret, optional raw response.
 *
 * @param path token file; must not be {@code null}
 * @return the token, or {@code null} if the file does not exist
 * @throws IllegalArgumentException if {@code path} is {@code null}
 * @throws IOException              if the file cannot be read
 */
private static Token loadToken(final Path path) throws IOException {
    if (path == null) {
        throw new IllegalArgumentException();
    }

    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("[" + OAuthClient.class.getSimpleName() + "] loadToken() : path=" + path);
    }

    if (Files.notExists(path)) {
        return null;
    }

    try (final BufferedReader reader = Files.newBufferedReader(path, Charset.forName("utf-8"))) {
        final String tokenValue = reader.readLine();
        final String secretValue = reader.readLine();
        final String rawResponse = reader.readLine();

        // The raw-response line is optional; older token files only have two lines.
        return rawResponse == null ? new Token(tokenValue, secretValue)
                : new Token(tokenValue, secretValue, rawResponse);
    }
}

From source file:org.apache.kylin.monitor.ApiRequestParser.java

/**
 * Parses a Kylin API request log and appends the non-GET cube/user requests to
 * a pipe-delimited CSV file on HDFS.
 *
 * For each matching log line the captured request fields are written, followed
 * by the request target (e.g. "cubes"), an action derived from the HTTP method
 * ("drop"/"save"/URI suffix) for cube requests, and the deploy environment.
 *
 * Fixes vs. previous version:
 * - the input {@code BufferedReader} was never closed (leaked on every call);
 *   it is now managed by try-with-resources.
 * - the {@code finally} block closed the underlying {@code writer} before the
 *   wrapping {@code cwriter}, so {@code CSVWriter.close()} flushed into an
 *   already-closed stream; the wrapper is now closed first.
 *
 * @param filePath local path of the Kylin API request log to parse
 * @param dPath    HDFS path of the result file to append to
 * @throws ParseException declared for interface compatibility
 * @throws IOException    if the HDFS writer cannot be opened
 */
public void parseRequestLog(String filePath, String dPath) throws ParseException, IOException {

    logger.info("Start parsing kylin api request file " + filePath + " !");

    // writer config init
    FileSystem fs = this.getHdfsFileSystem();
    org.apache.hadoop.fs.Path resultStorePath = new org.apache.hadoop.fs.Path(dPath);
    OutputStreamWriter writer = new OutputStreamWriter(fs.append(resultStorePath));
    CSVWriter cwriter = new CSVWriter(writer, '|', CSVWriter.NO_QUOTE_CHARACTER);

    // Only cube/user API calls are of interest.
    Pattern p_available = Pattern.compile("/kylin/api/(cubes|user)+.*");
    // Captures: 1=requester, 2=full time, 3=date, 4=URI, 5=method, 6=query string, 7=payload, 8=status.
    Pattern p_request = Pattern.compile(
            "^.*\\[.*KylinApiFilter.logRequest.*\\].*REQUEST:.*REQUESTER=(.*);REQ_TIME=(\\w+ (\\d{4}-\\d{2}-\\d{2}).*);URI=(.*);METHOD=(.*);QUERY_STRING=(.*);PAYLOAD=(.*);RESP_STATUS=(.*);$");
    // Captures: 1=target (e.g. "cubes"), 3=trailing action segment.
    Pattern p_uri = Pattern.compile("/kylin/api/(\\w+)(/.*/)*(.*)$");
    // Reusable matchers; reset per line to avoid re-allocating.
    Matcher m_available = p_available.matcher("");
    Matcher m_request = p_request.matcher("");
    Matcher m_uri = p_uri.matcher("");

    Path path = Paths.get(filePath);
    // try-with-resources so the reader is closed even on error (was leaked before).
    try (BufferedReader reader = Files.newBufferedReader(path, ENCODING)) {
        String line = null;
        while ((line = reader.readLine()) != null) {
            // reset the input
            m_available.reset(line);
            m_request.reset(line);

            // filter unnecessary info
            if (m_available.find()) {
                // filter GET info
                if (m_request.find() && !m_request.group(5).equals("GET")) {

                    List<String> groups = new ArrayList<String>();
                    for (int i = 1; i <= m_request.groupCount(); i++) {
                        groups.add(m_request.group(i));
                    }

                    String uri = m_request.group(4);
                    m_uri.reset(uri);
                    if (m_uri.find()) {

                        // add target
                        groups.add(m_uri.group(1));

                        // add action
                        if (m_uri.group(1).equals("cubes")) {
                            String method = m_request.group(5);
                            if ("DELETE".equals(method)) {
                                groups.add("drop");
                            } else if ("POST".equals(method)) {
                                groups.add("save");
                            } else {
                                // add parse action
                                groups.add(m_uri.group(3));
                            }
                        }
                    }
                    groups.add(DEPLOY_ENV);
                    String[] recordArray = groups.toArray(new String[groups.size()]);
                    // write to hdfs
                    cwriter.writeNext(recordArray);
                }
            }

        }
    } catch (IOException ex) {
        logger.info("Failed to write to hdfs:", ex);
    } finally {
        // Close the wrapper first: CSVWriter.close() flushes and closes the
        // underlying writer; writer.close() is then a no-op.
        cwriter.close();
        writer.close();
        fs.close();
    }

    logger.info("Finish parsing file " + filePath + " !");
}

From source file:io.seqware.pipeline.plugins.FileProvenanceQueryTool.java

/**
 * Runs the file-provenance query: obtains the provenance report (from an input
 * file or the web service), converts it to an H2 bulk-import file, loads it
 * into a temporary H2 table, executes the user-supplied SQL query, and writes
 * the results to the output file as tab-separated text.
 *
 * NOTE(review): column types are inferred from header names only — a column is
 * treated as numeric (INT) iff its lower-cased name contains "swid" and does
 * not contain "parent_sample".
 */
@Override
public ReturnValue do_run() {
    Path randomTempDirectory = null;
    Path originalReport = null;
    Path bulkImportFile = null;
    try {
        // Input report: an explicit file if provided, otherwise fetched from the web service.
        if (options.has(this.inFileSpec)) {
            originalReport = FileSystems.getDefault().getPath(options.valueOf(inFileSpec));
        } else {
            originalReport = populateOriginalReportFromWS();
        }

        List<String> headers;
        List<Boolean> numericDataType;
        // construct column name and datatypes
        // convert file provenance report into derby bulk load format
        try (BufferedReader originalReader = Files.newBufferedReader(originalReport,
                Charset.defaultCharset())) {
            // construct column name and datatypes
            String headerLine = originalReader.readLine();
            headers = Lists.newArrayList();
            numericDataType = Lists.newArrayList();
            for (String column : headerLine.split("\t")) {
                // Normalize header names to lower_snake_case for use as SQL column names.
                String editedColumnName = StringUtils.lowerCase(column).replaceAll(" ", "_").replaceAll("-",
                        "_");
                headers.add(editedColumnName);
                // note that Parent Sample SWID is a silly column that has colons in it
                numericDataType.add(
                        !editedColumnName.contains("parent_sample") && (editedColumnName.contains("swid")));
            }
            bulkImportFile = Files.createTempFile("import", "txt");
            try (BufferedWriter derbyImportWriter = Files.newBufferedWriter(bulkImportFile,
                    Charset.defaultCharset())) {
                Log.debug("Bulk import file written to " + bulkImportFile.toString());
                while (originalReader.ready()) {
                    String line = originalReader.readLine();
                    StringBuilder builder = new StringBuilder();
                    int i = 0;
                    for (String colValue : line.split("\t")) {
                        if (i != 0) {
                            builder.append("\t");
                        }
                        if (numericDataType.get(i)) {
                            // Numeric column: emit the value as-is; blanks become SQL NULL.
                            if (!colValue.trim().isEmpty()) {
                                builder.append(colValue);
                            }
                        } else {
                            // assume that this is a string
                            // need to double quotes to preserve them, see
                            // https://db.apache.org/derby/docs/10.4/tools/ctoolsimportdefaultformat.html
                            builder.append("\"").append(colValue.replaceAll("\"", "\"\"")).append("\"");
                        }
                        i++;
                    }
                    derbyImportWriter.write(builder.toString());
                    derbyImportWriter.newLine();
                }
            }
        }
        randomTempDirectory = Files.createTempDirectory("randomFileProvenanceQueryDir");

        // try using in-memory for better performance
        String protocol = "jdbc:h2:";
        if (options.has(useH2InMemorySpec)) {
            protocol = protocol + "mem:";
        }
        Connection connection = spinUpEmbeddedDB(randomTempDirectory, "org.h2.Driver", protocol);

        // drop table if it exists already (running in IDE?)
        Statement dropTableStatement = null;
        try {
            dropTableStatement = connection.createStatement();
            dropTableStatement.executeUpdate("DROP TABLE " + TABLE_NAME);
        } catch (SQLException e) {
            Log.debug("Report table didn't exist (normal)");
        } finally {
            DbUtils.closeQuietly(dropTableStatement);
        }

        // create table creation query
        StringBuilder tableCreateBuilder = new StringBuilder();
        // tableCreateBuilder
        tableCreateBuilder.append("CREATE TABLE " + TABLE_NAME + " (");
        for (int i = 0; i < headers.size(); i++) {
            if (i != 0) {
                tableCreateBuilder.append(",");
            }
            // Column type mirrors the header-based inference done above.
            if (numericDataType.get(i)) {
                tableCreateBuilder.append(headers.get(i)).append(" INT ");
            } else {
                tableCreateBuilder.append(headers.get(i)).append(" VARCHAR ");
            }
        }
        tableCreateBuilder.append(")");

        bulkImportH2(tableCreateBuilder, connection, bulkImportFile);

        // query the database and dump the results to
        try (BufferedWriter outputWriter = Files.newBufferedWriter(Paths.get(options.valueOf(outFileSpec)),
                Charset.defaultCharset(), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)) {
            // query the database and dump the results to
            QueryRunner runner = new QueryRunner();
            List<Map<String, Object>> mapList = runner.query(connection, options.valueOf(querySpec),
                    new MapListHandler());
            // output header
            if (mapList.isEmpty()) {
                Log.fatal("Query had no results");
                System.exit(-1);
            }
            // Header row: tab-separated, lower-cased column names from the first result row.
            StringBuilder builder = new StringBuilder();
            for (String columnName : mapList.get(0).keySet()) {
                if (builder.length() != 0) {
                    builder.append("\t");
                }
                builder.append(StringUtils.lowerCase(columnName));
            }
            outputWriter.append(builder);
            outputWriter.newLine();
            // Data rows: tab-separated values in result-set column order.
            for (Map<String, Object> rowMap : mapList) {
                StringBuilder rowBuilder = new StringBuilder();
                for (Entry<String, Object> e : rowMap.entrySet()) {
                    if (rowBuilder.length() != 0) {
                        rowBuilder.append("\t");
                    }
                    rowBuilder.append(e.getValue());
                }
                outputWriter.append(rowBuilder);
                outputWriter.newLine();
            }
        }
        DbUtils.closeQuietly(connection);
        Log.stdoutWithTime("Wrote output to " + options.valueOf(outFileSpec));
        return new ReturnValue();
    } catch (IOException | SQLException | ClassNotFoundException | InstantiationException
            | IllegalAccessException ex) {
        throw new RuntimeException(ex);
    } finally {
        // Best-effort cleanup of all temporary artifacts.
        if (originalReport != null) {
            FileUtils.deleteQuietly(originalReport.toFile());
        }
        if (bulkImportFile != null) {
            FileUtils.deleteQuietly(bulkImportFile.toFile());
        }
        if (randomTempDirectory != null && randomTempDirectory.toFile().exists()) {
            FileUtils.deleteQuietly(randomTempDirectory.toFile());
        }

    }
}

From source file:io.personium.core.model.impl.fs.DavMetadataFile.java

/**
 * load from the file./*from w  w w.ja v a 2  s.  c o m*/
 */
private void doLoad() throws PersoniumCoreException {
    try (Reader reader = Files.newBufferedReader(file.toPath(), Charsets.UTF_8)) {
        JSONParser parser = new JSONParser();
        this.json = (JSONObject) parser.parse(reader);
    } catch (IOException | ParseException e) {
        // IO failure or JSON is broken
        throw PersoniumCoreException.Dav.DAV_INCONSISTENCY_FOUND.reason(e);
    }
}