Example usage for java.nio.file Files newDirectoryStream

Introduction

On this page you can find example usage for java.nio.file Files.newDirectoryStream.

Prototype

public static DirectoryStream<Path> newDirectoryStream(Path dir) throws IOException 

Document

Opens a directory, returning a DirectoryStream to iterate over all entries in the directory.
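
Before the project examples below, here is a minimal sketch of the basic pattern (the "/tmp" path and class name are placeholders, not taken from any of the projects): open the stream in a try-with-resources block so the directory handle is closed even if iteration fails, then iterate the entries. The glob overload newDirectoryStream(Path, String) filters entries by file name.

import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class NewDirectoryStreamSketch {
    public static void main(String[] args) throws IOException {
        Path dir = Paths.get("/tmp"); // placeholder; any readable directory works

        // Basic form: every entry in the directory, non-recursive.
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir)) {
            for (Path entry : stream) {
                System.out.println(entry.getFileName());
            }
        }

        // Glob overload: only entries whose file names match the pattern.
        try (DirectoryStream<Path> txtFiles = Files.newDirectoryStream(dir, "*.txt")) {
            for (Path entry : txtFiles) {
                System.out.println(entry);
            }
        }
    }
}

A DirectoryStream can be iterated only once and does not recurse into subdirectories; for recursive traversal, Files.walk or Files.walkFileTree is the usual alternative.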

Usage

From source file: general.Main.java

/**
 * Loads all pre-build query types.
 */
private static void loadPreBuildQueryTypes() {
    try (DirectoryStream<Path> directoryStream = Files
            .newDirectoryStream(Paths.get("preBuildQueryTypeFiles"))) {
        for (Path filePath : directoryStream) {
            if (Files.isRegularFile(filePath)) {
                if (filePath.toString().endsWith(".preBuildQueryType")) {
                    String queryString = new String(readAllBytes(filePath));
                    OpenRDFQueryHandler queryHandler = new OpenRDFQueryHandler();
                    //queryHandler.setValidityStatus(1);
                    queryHandler.setQueryString(queryString);
                    if (queryHandler.getValidityStatus() != 1) {
                        logger.info("The Pre-build query " + filePath + " is no valid SPARQL");
                        continue;
                    }
                    ParsedQuery normalizedPreBuildQuery = queryHandler.getNormalizedQuery();
                    String queryTypeName = filePath.toString().substring(
                            filePath.toString().lastIndexOf("/") + 1, filePath.toString().lastIndexOf("."));
                    if (normalizedPreBuildQuery != null) {
                        queryTypes.put(normalizedPreBuildQuery, queryTypeName);
                    } else {
                        logger.info("Pre-build query " + queryTypeName + " could not be parsed.");
                    }
                }
                if (filePath.toString().endsWith(".tsv")) {
                    TsvParserSettings parserSettings = new TsvParserSettings();
                    parserSettings.setLineSeparatorDetectionEnabled(true);
                    parserSettings.setHeaderExtractionEnabled(true);
                    parserSettings.setSkipEmptyLines(true);
                    parserSettings.setReadInputOnSeparateThread(true);

                    ObjectRowProcessor rowProcessor = new ObjectRowProcessor() {
                        @Override
                        public void rowProcessed(Object[] row, ParsingContext parsingContext) {
                            if (row.length <= 1) {
                                logger.warn("Ignoring line without tab while parsing.");
                                return;
                            }
                            if (row.length == 5) {
                                queryTypeToToolMapping.put(new Tuple2<>(row[0].toString(), row[1].toString()),
                                        new Tuple2<>(row[2].toString(), row[3].toString()));
                                return;
                            }
                            logger.warn("Line with row length " + row.length
                                    + " found. Is the formatting of toolMapping.tsv correct?");
                            return;
                        }

                    };

                    parserSettings.setProcessor(rowProcessor);

                    TsvParser parser = new TsvParser(parserSettings);

                    parser.parse(filePath.toFile());
                }
            }

        }

    } catch (IOException e) {
        logger.error("Could not read from directory inputData/queryType/premadeQueryTypeFiles", e);
    }
}

From source file: org.apache.tika.batch.fs.FSBatchTestBase.java

/**
 * Counts immediate children only; does not recurse into subdirectories.
 * @param p the directory whose immediate children are counted
 * @return the number of entries directly inside {@code p}
 * @throws IOException if the directory cannot be opened or read
 */
public static int countChildren(Path p) throws IOException {
    int i = 0;
    try (DirectoryStream<Path> ds = Files.newDirectoryStream(p)) {
        Iterator<Path> it = ds.iterator();
        while (it.hasNext()) {
            i++;
            it.next();
        }
    }
    return i;
}
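
As a follow-up to the countChildren helper above, the newDirectoryStream(Path, DirectoryStream.Filter) overload can apply a filter while the directory is listed. The sketch below (class and method names are illustrative, not part of the Tika source) counts only regular files:

import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;

public final class FilteredCountSketch {
    /**
     * Counts the immediate children of p that are regular files, using the
     * filtered overload of Files.newDirectoryStream.
     */
    public static int countRegularFiles(Path p) throws IOException {
        DirectoryStream.Filter<Path> regularFilesOnly = entry -> Files.isRegularFile(entry);
        int count = 0;
        try (DirectoryStream<Path> ds = Files.newDirectoryStream(p, regularFilesOnly)) {
            for (Path ignored : ds) {
                count++;
            }
        }
        return count;
    }
}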

From source file: com.upplication.s3fs.util.AmazonS3ClientMock.java

@Override
public ObjectListing listNextBatchOfObjects(ObjectListing previousObjectListing) {
    ObjectListing objectListing = new ObjectListing();
    objectListing.setBucketName(previousObjectListing.getBucketName());
    objectListing.setPrefix(previousObjectListing.getPrefix());
    objectListing.setMarker(previousObjectListing.getMarker());
    objectListing.setDelimiter(previousObjectListing.getDelimiter());

    if (!previousObjectListing.isTruncated() || previousObjectListing.getNextMarker() == null) {
        return objectListing;
    }

    Path bucket = find(previousObjectListing.getBucketName());
    List<S3Element> elems = new ArrayList<>();
    try {
        for (Path elem : Files.newDirectoryStream(bucket)) {
            elems.add(parse(elem, bucket));
        }
    } catch (IOException e) {
        throw new AmazonClientException(e);
    }
    Collections.sort(elems, new Comparator<S3Element>() {
        @Override
        public int compare(S3Element o1, S3Element o2) {
            return o1.getS3Object().getKey().compareTo(o2.getS3Object().getKey());
        }
    });
    Iterator<S3Element> iterator = elems.iterator();

    int i = 0;
    boolean continueElement = false;

    while (iterator.hasNext()) {

        S3Element elem = iterator.next();

        if (!continueElement && elem.getS3Object().getKey().equals(previousObjectListing.getNextMarker())) {
            continueElement = true;
        }

        if (continueElement) {
            // TODO. add delimiter and marker support
            if (previousObjectListing.getPrefix() != null
                    && elem.getS3Object().getKey().startsWith(previousObjectListing.getPrefix())) {

                S3ObjectSummary s3ObjectSummary = parseToS3ObjectSummary(elem);
                objectListing.getObjectSummaries().add(s3ObjectSummary);
                // max 1000 elements at same time.
                if (i + 1 == LIMIT_AWS_MAX_ELEMENTS && iterator.hasNext()) {
                    objectListing.setTruncated(true);
                    objectListing.setNextMarker(iterator.next().getS3Object().getKey());
                    return objectListing;
                }
                objectListing.setTruncated(false);
                i++;
            }
        }
    }

    return objectListing;
}

From source file: company.gonapps.loghut.dao.PostDao.java

public List<PostDto> getList(int year, int month) throws IOException, InvalidTagNameException {
    List<PostDto> posts = new LinkedList<>();

    rrwl.readLock().lock();
    try (DirectoryStream<Path> ds = Files.newDirectoryStream(Paths.get(settingDao.getSetting("posts.directory")
            + "/" + String.format("%04d", year) + "/" + String.format("%02d", month)))) {

        for (Path path : ds) {
            Matcher matcher = postPathStringPattern.matcher(path.toString());
            if (matcher.find())
                posts.add(get(Integer.parseInt(matcher.group("year")), Integer.parseInt(matcher.group("month")),
                        Integer.parseInt(matcher.group("day")), Integer.parseInt(matcher.group("number")),
                        (matcher.group("secret").equals("s"))));
        }
    } finally {
        rrwl.readLock().unlock();
    }

    Collections.sort(posts, new PostDtoComparator());

    return posts;
}

From source file: org.apache.karaf.tooling.ArchiveMojo.java

private void addFileToTarGz(TarArchiveOutputStream tOut, Path f, String base) throws IOException {
    if (Files.isDirectory(f)) {
        String entryName = base + f.getFileName().toString() + "/";
        TarArchiveEntry tarEntry = new TarArchiveEntry(entryName);
        tOut.putArchiveEntry(tarEntry);
        tOut.closeArchiveEntry();
        try (DirectoryStream<Path> children = Files.newDirectoryStream(f)) {
            for (Path child : children) {
                addFileToTarGz(tOut, child, entryName);
            }
        }
    } else if (useSymLinks && Files.isSymbolicLink(f)) {
        String entryName = base + f.getFileName().toString();
        TarArchiveEntry tarEntry = new TarArchiveEntry(entryName, TarConstants.LF_SYMLINK);
        tarEntry.setLinkName(Files.readSymbolicLink(f).toString());
        tOut.putArchiveEntry(tarEntry);
        tOut.closeArchiveEntry();
    } else {
        String entryName = base + f.getFileName().toString();
        TarArchiveEntry tarEntry = new TarArchiveEntry(entryName);
        tarEntry.setSize(Files.size(f));
        if (entryName.contains("/bin/") || (!usePathPrefix && entryName.startsWith("bin/"))) {
            if (entryName.endsWith(".bat")) {
                tarEntry.setMode(0644);
            } else {
                tarEntry.setMode(0755);
            }
        }
        tOut.putArchiveEntry(tarEntry);
        Files.copy(f, tOut);
        tOut.closeArchiveEntry();
    }
}

From source file: com.streamsets.pipeline.lib.io.LiveFile.java

/**
 * Refreshes the <code>LiveFile</code>; if the file was renamed, the returned path has the new name.
 *
 * @return the refreshed file if the file has been renamed, itself if the file has not changed, or
 * <code>null</code> if the file no longer exists in the directory.
 * @throws IOException thrown if the LiveFile could not be refreshed
 */
public LiveFile refresh() throws IOException {
    LiveFile refresh = this;
    boolean changed;
    try {
        BasicFileAttributes attrs = Files.readAttributes(path, BasicFileAttributes.class);
        String iNodeCurrent = attrs.fileKey().toString();
        int headLenCurrent = (int) Math.min(headLen, attrs.size());
        String headHashCurrent = computeHash(path, headLenCurrent);
        changed = !this.iNode.equals(iNodeCurrent) || !this.headHash.equals(headHashCurrent);
    } catch (NoSuchFileException ex) {
        changed = true;
    }
    if (changed) {

        try (DirectoryStream<Path> directoryStream = Files.newDirectoryStream(path.getParent())) {
            for (Path path : directoryStream) {
                if (path.toFile().isDirectory()) {
                    continue;
                }
                BasicFileAttributes attrs = Files.readAttributes(path, BasicFileAttributes.class);
                String iNode = attrs.fileKey().toString();
                int headLen = (int) Math.min(this.headLen, attrs.size());
                String headHash = computeHash(path, headLen);
                if (iNode.equals(this.iNode) && headHash.equals(this.headHash)) {
                    if (headLen == 0) {
                        headLen = (int) Math.min(HEAD_LEN, attrs.size());
                        headHash = computeHash(path, headLen);
                    }
                    return new LiveFile(path, iNode, headHash, headLen);
                } /**rename??*/
            }
        }
        return null;
    } /**change? itself*/
    return refresh;
}

From source file: uk.co.unclealex.executable.impl.MakeLinksCommandRunnerTest.java

protected String[] filenamesIn(Path dir) throws IOException {
    Function<Path, String> nameFunction = new Function<Path, String>() {
        @Override
        public String apply(Path path) {
            return path.getFileName().toString();
        }
    };
    return Iterables.toArray(Sets.newTreeSet(Iterables.transform(Files.newDirectoryStream(dir), nameFunction)),
            String.class);
}

From source file: org.jenkinsci.plugins.ssegateway.EventHistoryStore.java

private synchronized static void deleteAllFilesInDir(File dir, Long olderThan) throws IOException {
    Path dirPath = Paths.get(dir.toURI());

    try (final DirectoryStream<Path> dirStream = Files.newDirectoryStream(dirPath)) {
        for (final Path entry : dirStream) {
            File file = entry.toFile();
            if (file.isDirectory()) {
                deleteAllFilesInDir(file, olderThan);
            }
            if (olderThan == null || file.lastModified() < olderThan) {
                if (!file.delete()) {
                    LOGGER.log(Level.SEVERE, "Error deleting file " + file.getAbsolutePath());
                }
            }
        }
    }
}

From source file: org.carcv.impl.core.io.FFMPEGVideoHandlerIT.java

/**
 * Test method for {@link org.carcv.impl.core.io.FFMPEGVideoHandler#splitIntoFrames(java.nio.file.Path)}.
 *
 * @throws IOException
 */
@Test
public void testSplitIntoFramesPath() throws IOException {
    FFMPEGVideoHandler fvh = new FFMPEGVideoHandler();
    FFMPEGVideoHandler.copyCarImagesToDir(entry.getCarImages(), videoDir);
    Path video = fvh.generateVideo(videoDir, FFMPEGVideoHandler.defaultFrameRate);

    assertTrue("Split failed.", FFMPEGVideoHandler.splitIntoFrames(video));

    Path dir = Paths.get(video.toString() + ".dir");
    DirectoryStream<Path> paths = Files.newDirectoryStream(dir);
    int counter = 0;
    for (@SuppressWarnings("unused") Path p : paths) {
        counter++;
    }
    assertEquals(entry.getCarImages().size(), counter);

    Files.delete(video);
    DirectoryWatcher.deleteDirectory(dir);
}

From source file: org.syncany.plugins.local.LocalTransferManager.java

@Override
public <T extends RemoteFile> Map<String, T> list(Class<T> remoteFileClass) throws StorageException {
    connect();

    Path folder = Paths.get(getRemoteFilePath(remoteFileClass));
    Map<String, T> files = Maps.newHashMap();

    try (DirectoryStream<Path> directoryStream = Files.newDirectoryStream(folder)) {
        for (Path path : directoryStream) {
            try {
                T remoteFile = RemoteFile.createRemoteFile(path.getFileName().toString(), remoteFileClass);
                files.put(path.getFileName().toString(), remoteFile);
            } catch (StorageException e) {
                logger.log(Level.INFO, "Cannot create instance of " + remoteFileClass.getSimpleName()
                        + " for file " + path + "; maybe invalid file name pattern. Ignoring file.");
            }
        }
    } catch (IOException e) {
        logger.log(Level.SEVERE, "Unable to list directory", e);
    }

    return files;
}