Example usage for java.nio.file Files lines

List of usage examples for java.nio.file Files lines

Introduction

On this page you can find example usages of java.nio.file Files.lines.

Prototype

public static Stream<String> lines(Path path) throws IOException 

Source Link

Document

Read all lines from a file as a Stream.

Usage

From source file:com.gitpitch.services.OfflineService.java

/**
 * Downloads the online assets referenced by the PITCHME_ONLINE markdown file
 * into the offline zip directory, fetching each distinct URL only once.
 *
 * @param pp      pitch parameters used to resolve download headers
 * @param zipRoot root directory of the offline zip being assembled
 */
private void fetchOnlineAssets(PitchParams pp, Path zipRoot) {

    Path mdOnlinePath = zipRoot.resolve(PITCHME_ONLINE_PATH);
    File mdOnlineFile = mdOnlinePath.toFile();

    if (mdOnlineFile.exists()) {

        MarkdownModel markdownModel = (MarkdownModel) markdownModelFactory.create(null);

        // try-with-resources closes the line stream even if a download fails
        try (Stream<String> stream = Files.lines(mdOnlinePath)) {

            List<String> assetUrls = stream.map(markdownModel::offlineAssets).collect(Collectors.toList());

            log.debug("fetchOnlineAssets: assetUrls={}", assetUrls);

            Path zipMdAssetsPath = zipRoot.resolve(ZIP_MD_ASSETS_DIR);
            zipMdAssetsPath = diskService.ensure(zipMdAssetsPath);

            // Track already-downloaded URLs so each asset is fetched only once.
            List<String> fetched = new ArrayList<String>();

            for (String assetUrl : assetUrls) {
                if (assetUrl != null && !fetched.contains(assetUrl)) {
                    diskService.download(pp, zipMdAssetsPath, assetUrl, FilenameUtils.getName(assetUrl),
                            grsManager.get(pp).getHeaders());
                    fetched.add(assetUrl);
                }
            }

        } catch (Exception mex) {
            // Pass the exception as the final extra argument so SLF4J records the
            // stack trace; the original single-arg form consumed the Throwable as
            // the {} placeholder value and dropped the trace.
            log.warn("fetchOnlineAssets: ex={}", mex.toString(), mex);
        }

    } else {
        log.warn("fetchOnlineAssets: mdOnline not found={}", mdOnlineFile);
    }
}

From source file:no.imr.stox.functions.utils.RUtils.java

/**
 * calculate accurate polygons by use of r in a separate process.
 *
 * @param polygons/*w w w.  ja va2s  .c  om*/
 * @return
 */
static public PolygonAreaMatrix getAccuratePolygons(String rFolder, MatrixBO polygons,
        String tempRScriptFileName) {
    PolygonAreaMatrix res = new PolygonAreaMatrix();
    if (tempRScriptFileName == null) {
        tempRScriptFileName = "area.txt";
    }
    String fileName = getTmpDir() + tempRScriptFileName;
    try (PrintWriter pw = new PrintWriter(fileName)) {
        pw.println("Sys.setenv(JAVA_HOME = \"\")");
        pw.println("library(Rstox)");
        //            pw.println("source('" + ProjectUtils.getSystemRFolder() + "/rstox_spatial.r" + "')");
        pw.print("pol <- cbind(");
        List<String> rowKeys = polygons.getRowKeys();
        for (int i = 0; i < rowKeys.size(); i++) {
            String strata = rowKeys.get(i);
            //polygons.getRowKeys()
            pw.print("c('" + strata + "', '"
                    + getPolygonWKT((Geometry) polygons.getRowColValue(strata, Functions.COL_POLVAR_POLYGON))
                    + "')");
            if (i < rowKeys.size() - 1) {
                pw.print(",");
            } else {
                pw.print(")");
            }
            pw.println();
        }
        pw.println("invisible(apply(cbind(cbind(pol[1,], apply(cbind(pol[2,]), MARGIN=1,");
        pw.println("FUN=function(p) polyArea(p)))), MARGIN=1,");
        pw.println("FUN=function(x) cat(x[1],':', x[2],sep='', ';')))");
        pw.println("quit()");
    } catch (FileNotFoundException ex) {
        throw new UncheckedIOException(ex);
    }
    fileName = fileName.replace("\\", "/");
    String fileNameOut = fileName + ".out";
    String fileName2 = fileName + ".call";
    try (PrintWriter pw = new PrintWriter(fileName2)) {
        pw.println("sink('" + fileNameOut + "')");
        pw.println("source('" + fileName + "')");
    } catch (FileNotFoundException ex) {
        throw new UncheckedIOException(ex);
    }
    callR(rFolder, fileName2, false);
    try {
        Files.lines(Paths.get(fileNameOut)).forEach(s -> {
            String[] strs = s.split(";");
            Arrays.stream(strs).forEach(str_1 -> {
                String[] str = str_1.split(":");
                res.getData().setRowValue(str[0], Double.valueOf(str[1]));
            });
        });
    } catch (IOException ex) {
        Logger.getLogger(RUtils.class.getName()).log(Level.SEVERE, null, ex);
    }
    /*        java.io.InputStream is = proc.getInputStream();
    java.util.Scanner s = new java.util.Scanner(is).useDelimiter(";");
    while (s.hasNext()) {
    String[] str = s.next().split(":");
    res.getData().setRowValue(str[0], Double.valueOf(str[1]));
    //System.out.println(s.next());
    }*/
    return res;
}

From source file:com.ikanow.aleph2.harvest.logstash.utils.LogstashUtils.java

/**
 * Reads the given output file and outputs it to the logger with the spec'd log level.
 * @param logger/*  w  ww  . ja  v  a2 s  .com*/
 * @param level
 * @param output_file
 * @throws IOException 
 */
public static void sendOutputToLogger(final IBucketLogger logger, final Level level, final File output_file,
        final Optional<Long> max_lines) throws IOException {
    //      _logger.error("Reading output file: " + output_file + " to send to logger at level: " + level);
    Files.lines(output_file.toPath()).limit(max_lines.orElse(10000L)).forEach(line -> {
        try {
            //convert line to valid json, then parse json, build BMB object from it
            final String fixed_line = line.replaceAll(logstash_colon_search, logstash_colon_replace)
                    .replaceAll(logstash_arrow_search, logstash_arrow_replace)
                    .replaceAll(logstash_newline_search, logstash_newline_replace);
            final String plugin_fixed = fixPlugin(fixed_line);
            final ObjectNode line_object = (ObjectNode) _mapper.readTree(plugin_fixed);
            //move specific fields we want into BMB
            final Date date = parseLogstashDate(line_object.remove("timestamp").asText());
            final Level logstash_level = Level.valueOf(line_object.remove("level").asText());
            final String message = line_object.remove("message").asText();
            //move everything else into details map
            logger.inefficientLog(logstash_level,
                    new BasicMessageBean(date, true, LogstashHarvestService.class.getSimpleName(),
                            "test_output", null, message,
                            StreamSupport
                                    .stream(Spliterators.spliteratorUnknownSize(line_object.fields(),
                                            Spliterator.ORDERED), true)
                                    .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue().asText()))));
        } catch (Exception ex) {
            //fallback on conversion failure
            logger.inefficientLog(level, ErrorUtils
                    .buildSuccessMessage(LogstashHarvestService.class.getSimpleName(), "test_output", line));
        }
    });
    //TODO should we delete log file after we've read it?
}

From source file:com.willwinder.universalgcodesender.gcode.GcodeParserTest.java

@Test
public void autoLevelerProcessorSet() throws Exception {
    System.out.println("autoLevelerProcessorSet");
    // Build the processor pipeline: strip comments, expand arcs, split long
    // lines, then apply mesh leveling over a 2x2 probe grid.
    GcodeParser gcp = new GcodeParser();
    gcp.addCommandProcessor(new CommentProcessor());
    gcp.addCommandProcessor(new ArcExpander(true, 0.1));
    gcp.addCommandProcessor(new LineSplitter(1));
    Position grid[][] = { { new Position(-5, -5, 0, MM), new Position(-5, 35, 0, MM) },
            { new Position(35, -5, 0, MM), new Position(35, 35, 0, MM) } };
    gcp.addCommandProcessor(new MeshLeveler(0, grid, Units.MM));

    Path output = Files.createTempFile("autoleveler_processor_set_test.nc", "");

    // Copy resource to temp file since my parser methods need it that way.
    URL file = this.getClass().getClassLoader().getResource("./gcode/circle_test.nc");
    File tempFile = File.createTempFile("temp", "file");
    IOUtils.copy(file.openStream(), FileUtils.openOutputStream(tempFile));

    GcodeParserUtils.processAndExport(gcp, tempFile, output.toFile());

    GcodeStreamReader reader = new GcodeStreamReader(output.toFile());

    // Compare the processed output command-by-command against the golden file.
    file = this.getClass().getClassLoader().getResource("./gcode/circle_test.nc.processed");
    // try-with-resources: the original left this Files.lines stream unclosed,
    // leaking a handle on the golden file.
    try (java.util.stream.Stream<String> expected = Files.lines(Paths.get(file.toURI()))) {
        expected.forEach((t) -> {
            try {
                GcodeCommand c = reader.getNextCommand();
                if (c == null) {
                    Assert.fail("Reached end of gcode reader before end of expected commands.");
                }
                Assert.assertEquals(c.getCommandString(), t);
            } catch (IOException ex) {
                Assert.fail("Unexpected exception.");
            }
        });
    }
    assertEquals(1027, reader.getNumRows());
    // Clean up both temp files (the original leaked tempFile).
    tempFile.delete();
    output.toFile().delete();
}

From source file:org.interpss.service.train_data.impl.BaseAclfTrainCaseBuilder.java

/**
 * Reads the given text file and feeds every meaningful line to the processor,
 * skipping comment lines (starting with '#') and blank lines.
 *
 * @param filename  path of the text file to read
 * @param processor callback invoked once per remaining line
 */
protected void loadTextFile(String filename, Consumer<String> processor) {
    try (Stream<String> fileLines = Files.lines(Paths.get(filename))) {
        fileLines.filter(text -> !(text.startsWith("#") || text.trim().isEmpty()))
                .forEach(processor);
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:org.niord.core.batch.BatchService.java

/**
 * Returns the contents of the log file with the given file name.
 * If fromLineNo is specified, only the subsequent lines are returned.
 *
 * @param instanceId the instance id/*from w  w w.  j a v a  2s  .c  o  m*/
 * @param logFileName the log file name
 * @param fromLineNo if specified, only the subsequent lines are returned
 * @return the contents of the log file
 */
public String getBatchJobLogFile(Long instanceId, String logFileName, Integer fromLineNo) throws IOException {

    BatchData job = findByInstanceId(instanceId);
    if (job == null) {
        throw new IllegalArgumentException("Invalid batch instance ID " + instanceId);
    }

    Path path = computeBatchJobPath(job.computeBatchJobFolderPath().resolve(logFileName));
    if (!Files.isRegularFile(path) || !Files.isReadable(path)) {
        throw new IllegalArgumentException("Invalid batch log file " + logFileName);
    }

    int skipLineNo = fromLineNo == null ? 0 : fromLineNo;
    try (Stream<String> logStream = Files.lines(path)) {
        return logStream.skip(skipLineNo).collect(Collectors.joining("\n"));
    }
}

From source file:com.redhat.example.rules.unittest.CsvTestHelper.java

/**
 * Reads a CSV file and returns every line after the header row.
 *
 * <p>On read failure the error is printed and the test is failed via
 * {@code fail(...)}.</p>
 *
 * @param fileName path of the CSV file to read
 * @return the data lines (header excluded) in file order
 */
private static String[] readCsv(String fileName) {
    Path file = Paths.get(fileName);

    List<String> rtn = new ArrayList<String>();
    try {
        // readAllLines instead of an unclosed Files.lines stream: the original
        // leaked the file handle and mid-read errors bypassed this catch as
        // UncheckedIOException.
        List<String> allLines = Files.readAllLines(file);
        if (!allLines.isEmpty()) {
            // Skip the header row.
            rtn.addAll(allLines.subList(1, allLines.size()));
        }
    } catch (IOException e) {
        e.printStackTrace();
        fail("fail at readCsv(" + fileName + ")");
    }
    return rtn.toArray(new String[0]);
}

From source file:org.omg.dmn.tck.Reporter.java

/**
 * Loads per-vendor TCK results from the input folder.
 *
 * <p>Expected layout: {@code <input>/<vendor>/<version>/} containing
 * {@code tck_results.properties} (vendor metadata) and {@code tck_results.csv}
 * (one result per line: testSuite,test,testCase,result).</p>
 *
 * @param params parameters whose {@code input} folder is scanned
 * @return map of "vendor name / product version" to Vendor with its test results
 */
private static Map<String, Vendor> loadTestResults(Parameters params) {
    logger.info("Loading test results from folder " + params.input.getName());
    Map<String, Vendor> results = new TreeMap<>();

    File[] vendors = params.input.listFiles((dir, name) -> !name.startsWith(".") && !name.endsWith(".html"));
    for (File vendor : vendors) {
        File[] versions = vendor.listFiles((dir, name) -> !name.startsWith(".") && !name.endsWith(".html"));
        for (File version : versions) {
            File[] propertiesFile = version.listFiles((dir, name) -> name.equals("tck_results.properties"));
            Properties properties = new Properties();
            if (propertiesFile.length == 1) {
                // try-with-resources: the original never closed this FileReader.
                try (FileReader propsReader = new FileReader(propertiesFile[0])) {
                    properties.load(propsReader);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            File[] resultsFile = version.listFiles((dir, name) -> name.equals("tck_results.csv"));

            if (resultsFile.length == 1) {
                Map<String, TestResult> testResults = new TreeMap<>();
                try (Stream<String> lines = Files.lines(resultsFile[0].toPath())) {
                    // NOTE(review): an earlier comment claimed the file header is
                    // skipped, but every line (including any header) is parsed —
                    // confirm the CSV is written without a header row.
                    lines.forEach(l -> {
                        String[] fields = l.split(",");
                        TestResult testResult = new TestResult(fields[0], fields[1], fields[2],
                                TestResult.Result.fromString(fields[3]));
                        String testKey = createTestKey(fields[0], fields[1], fields[2]);
                        testResults.put(testKey, testResult);
                    });
                } catch (IOException e) {
                    logger.error("Error reading input file '" + params.input.getName() + "'", e);
                    continue;
                }
                // NOTE(review): getProperty returns null for a missing key, so
                // .trim() would NPE on an incomplete properties file — assumes all
                // six keys are always present.
                Vendor v = new Vendor(properties.getProperty("vendor.name").trim(),
                        properties.getProperty("vendor.url").trim(),
                        properties.getProperty("product.name").trim(),
                        properties.getProperty("product.url").trim(),
                        properties.getProperty("product.version").trim(),
                        properties.getProperty("product.comment").trim(), testResults);
                results.put(v.getName() + " / " + v.getVersion(), v);
                logger.info(testResults.size() + " test results loaded for vendor " + v);
            }
        }
    }
    return results;
}

From source file:org.wildfly.security.tool.FileSystemRealmCommand.java

/**
 * Handles input being given as a descriptor file.
 *
 * <p>Blank lines terminate one descriptor block; non-blank lines are
 * {@code option:value} pairs, split at the first colon only so Windows
 * paths keep their drive colon.</p>
 *
 * @param file path to the descriptor file
 * @throws Exception Exception to be handled by Elytron Tool
 */
private void parseDescriptorFile(String file) throws Exception {
    Path path = Paths.get(file);
    if (!path.toFile().exists()) {
        // errorHandler is presumably expected to abort; if it returns, parsing
        // continues and Files.lines below fails on the missing file — TODO confirm.
        errorHandler(ElytronToolMessages.msg.fileNotFound(file));
    }

    Descriptor descriptor = new Descriptor();
    // 1-based index of the descriptor block currently being filled in.
    AtomicInteger count = new AtomicInteger(1);
    try (Stream<String> stream = Files.lines(path)) {
        stream.forEach(line -> {
            if (line.equals("")) {
                // Blank line ends the current block: fill in defaults, store a
                // copy, reset the working descriptor, and advance the counter.
                findMissingRequiredValuesAndSetValues(count.intValue(), descriptor);
                copyAddResetDescriptor(descriptor);
                count.getAndIncrement();
            } else {
                // Since Windows URIs have a colon, only split at first occurrence
                String[] parts = line.split(":", 2);
                String option = parts[0];
                // NOTE(review): a non-blank line without ':' yields parts.length == 1,
                // so parts[1] throws ArrayIndexOutOfBoundsException here — confirm
                // whether malformed lines should be handled more gracefully.
                String arg = parts[1];
                // Unrecognized options are silently ignored.
                switch (option) {
                case USERS_FILE_PARAM:
                    descriptor.setUsersFile(arg);
                    break;
                case ROLES_FILE_PARAM:
                    descriptor.setRolesFile(arg);
                    break;
                case OUTPUT_LOCATION_PARAM:
                    descriptor.setOutputLocation(arg);
                    break;
                case FILESYSTEM_REALM_NAME_PARAM:
                    descriptor.setFileSystemRealmName(arg);
                    break;
                case SECURITY_DOMAIN_NAME_PARAM:
                    descriptor.setSecurityDomainName(arg);
                    break;
                }
            }
        });
    } catch (IOException e) {
        errorHandler(e);
    }
    // The final block is not followed by a blank line, so flush it here.
    int currentCount = count.intValue();
    findMissingRequiredValuesAndSetValues(currentCount, descriptor);
    copyAddResetDescriptor(descriptor);
    if (summaryMode) {
        printDescriptorBlocks(currentCount);
    }
    count.getAndIncrement();
}

From source file:org.wildfly.security.tool.FileSystemRealmCommand.java

/**
 * Parses a users or roles properties file to get the user mappings
 * (users-password for users files and users-roles for roles files).
 *
 * @param descriptor The descriptor class holding the information for this block
 * @param param The type of input file (either users-file or roles-file)
 * @param count The index of the current descriptor block
 * @return The String list of each user mapping in the file
 * @throws Exception Exception to be handled by Elytron Tool
 */
private List<String> parseInputFile(Descriptor descriptor, String param, int count) throws Exception {
    List<String> entries = new ArrayList<>();
    String location = getDescriptorParam(param, descriptor);

    // Guard: no value configured for this parameter at all.
    if (location == null) {
        warningHandler(ElytronToolMessages.msg.noValueFound(param));
        return entries;
    }

    // Guard: configured file does not exist on disk.
    Path inputPath = Paths.get(location);
    if (!inputPath.toFile().exists()) {
        if (descriptors.size() == 1) {
            errorHandler(ElytronToolMessages.msg.fileNotFound(location));
        }
        warningHandler(ElytronToolMessages.msg.fileNotFound(location).getMessage());
        warningHandler(ElytronToolMessages.msg.skippingDescriptorBlock(count,
                String.format("could not find file for %s", param)));
        return entries;
    }

    try (Stream<String> fileLines = Files.lines(inputPath)) {
        fileLines.forEach(entry -> {
            if (entry.startsWith("#$REALM_NAME=")) {
                // Extract the realm name between '=' and the closing '$'.
                String realmName = entry.substring(entry.indexOf("=") + 1);
                realmName = realmName.substring(0, realmName.indexOf("$"));
                descriptor.setRealmName(realmName);
            } else if (!entry.startsWith("#")) {
                // Comment lines are skipped; everything else is a user mapping.
                entries.add(entry);
            }
        });
    }
    return entries;
}