Example usage for java.nio.file Files newDirectoryStream

List of usage examples for java.nio.file Files newDirectoryStream

Introduction

On this page you can find example usage of java.nio.file Files.newDirectoryStream.

Prototype

public static DirectoryStream<Path> newDirectoryStream(Path dir) throws IOException 

Source Link

Document

Opens a directory, returning a DirectoryStream to iterate over all entries in the directory.

Usage

From source file:org.bimserver.plugins.ResourceFetcher.java

/**
 * Lists the file names of all entries directly under the directory resolved from the given key.
 *
 * @param key resource key passed to {@code getPath(String)} — TODO confirm key semantics with caller
 * @return set of entry file names; empty when the key resolves to no path or the directory
 *         cannot be read (errors are printed, not propagated — kept from original behavior)
 */
public Set<String> listKeys(String key) {
    Path path = getPath(key);
    Set<String> result = new HashSet<>();
    if (path != null) {
        // FIX: the DirectoryStream was never closed, leaking a file handle per call;
        // try-with-resources guarantees it is released.
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(path)) {
            for (Path entry : stream) {
                result.add(entry.getFileName().toString());
            }
        } catch (IOException e) {
            // best-effort listing: keep original non-propagating behavior
            e.printStackTrace();
        }
    }
    return result;
}

From source file:org.tinymediamanager.core.movie.tasks.MovieActorImageFetcher.java

@Override
public void run() {
    // try/catch block in the root of the thread to log crashes
    try {
        // ensure the actors folder exists before scanning/downloading
        Path actorsDir = movie.getPathNIO().resolve(MovieActor.ACTOR_DIR);
        if (!Files.isDirectory(actorsDir)) {
            Files.createDirectory(actorsDir);
        }

        // first check which actor images can be deleted
        cleanupObsoleteActorImages(actorsDir);

        // second download missing images
        downloadMissingActorImages();
    } catch (Exception e) {
        LOGGER.error("Thread crashed: ", e);
    }
}

/**
 * Deletes image files (tbn/png/jpg, not dot-files) in the actors folder that no longer
 * belong to any of the movie's actors, or whose extension no longer matches the
 * actor's thumb URL.
 *
 * @param actorsDir the movie's actor-image directory
 */
private void cleanupObsoleteActorImages(Path actorsDir) {
    try (DirectoryStream<Path> directoryStream = Files.newDirectoryStream(actorsDir)) {
        for (Path path : directoryStream) {
            if (Utils.isRegularFile(path) && path.getFileName().toString().matches("(?i).*\\.(tbn|png|jpg)")
                    && !path.getFileName().toString().startsWith(".")) {
                boolean found = false;
                // actor images are stored as "First_Last.<ext>"; map back to the actor name
                String actorImage = FilenameUtils.getBaseName(path.getFileName().toString()).replace("_",
                        " ");
                for (MovieActor actor : movie.getActors()) {
                    if (actor.getName().equals(actorImage)) {
                        found = true;

                        // trick it to get rid of wrong extensions
                        if (!FilenameUtils.getExtension(path.getFileName().toString())
                                .equalsIgnoreCase(UrlUtil.getExtension(actor.getThumbUrl()))) {
                            found = false;
                        }
                        break;
                    }
                }
                // delete image if no matching actor was found
                if (!found) {
                    Utils.deleteFileWithBackup(path, movie.getDataSource());
                }
            }
        }
    } catch (IOException ex) {
        // FIX: this exception was previously swallowed silently; log it so
        // directory-scan failures are visible.
        LOGGER.warn("Could not clean up actor images in " + actorsDir, ex);
    }
}

/**
 * Copies cached thumb images into place for every actor that has a storage path and a
 * thumb URL but no image file on disk yet.
 */
private void downloadMissingActorImages() {
    for (MovieActor actor : movie.getActors()) {
        Path actorImage = actor.getStoragePath();

        if (actorImage != null && StringUtils.isNotEmpty(actor.getThumbUrl())
                && !Files.exists(actorImage)) {
            Path cache = ImageCache.getCachedFile(actor.getThumbUrl());
            if (cache != null) {
                Utils.copyFileSafe(cache, actorImage);
            }
        } else {
            // NOTE(review): this also warns when the image already exists on disk —
            // confirm whether that is intended.
            LOGGER.warn("Cannot download actor image " + actor);
        }
    }
}

From source file:org.sakuli.services.common.LogCleanUpResultServiceImpl.java

/**
 * Cleans the {@link Path} of files which are older than {@link SakuliProperties#logMaxAge}
 * days. On error no exception is thrown, because this is only an optional cleanup.
 *
 * @param path root folder of files to clean
 */
void cleanUpDirectory(Path path) {
    // FIX: the DirectoryStream returned by Files.newDirectoryStream was never closed,
    // leaking one file handle per directory visited; try-with-resources closes it.
    try (DirectoryStream<Path> entries = Files.newDirectoryStream(path)) {
        Instant maxDate = Instant.now().minus(sakuliProperties.getLogMaxAge(), ChronoUnit.DAYS);
        entries.forEach(e -> {
            if (Files.isDirectory(e)) {
                // recurse into subdirectories (the directory itself is kept)
                cleanUpDirectory(e);
            } else if (Files.isRegularFile(e)) {
                try {
                    if (Files.getLastModifiedTime(e).toInstant().isBefore(maxDate)) {
                        LOGGER.info("cleanup to old log file '{}'", e);
                        Files.deleteIfExists(e);
                    }
                } catch (IOException e1) {
                    LOGGER.error("can`t delete file", e1);
                }
            }
        });
    } catch (IOException e) {
        LOGGER.error("couldn`t access log file directory '" + path + "'", e);
    }
}

From source file:de.fatalix.book.importer.CalibriImporter.java

/**
 * Walks the directory tree under {@code root} and indexes book directories into Solr.
 * For each visited directory the contained files are matched by substring (".opf"
 * metadata, ".mobi", ".epub", ".jpg" cover) and bundled into one BookEntry; entries
 * are buffered and flushed to Solr in batches once more than {@code batchSize}
 * entries have accumulated.
 *
 * NOTE(review): entries still buffered when the walk finishes are never flushed here —
 * confirm a final flush happens elsewhere, otherwise the tail batch is lost.
 *
 * @param root      root directory of the Calibre library to import
 * @param solrURL   base URL of the Solr server
 * @param solrCore  name of the Solr core to add documents to
 * @param batchSize number of book entries to buffer before pushing a batch to Solr
 * @throws IOException         on file-tree walking errors
 * @throws SolrServerException on connection setup errors
 */
public static void processBooks(Path root, String solrURL, String solrCore, final int batchSize)
        throws IOException, SolrServerException {
    final SolrServer solrServer = SolrHandler.createConnection(solrURL, solrCore);
    final List<BookEntry> bookEntries = new ArrayList<>();
    Files.walkFileTree(root, new SimpleFileVisitor<Path>() {

        @Override
        public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
            // skip macOS metadata folders (e.g. from zip extraction)
            if (dir.toString().contains("__MACOSX")) {
                return FileVisitResult.SKIP_SUBTREE;
            }
            try (DirectoryStream<Path> directoryStream = Files.newDirectoryStream(dir)) {
                BookEntry bookEntry = new BookEntry().setUploader("admin");
                for (Path path : directoryStream) {
                    if (!Files.isDirectory(path)) {
                        // matching is by substring of the whole path, not by extension
                        if (path.toString().contains(".opf")) {
                            bookEntry = parseOPF(path, bookEntry);
                        }
                        if (path.toString().contains(".mobi")) {
                            bookEntry.setMobi(Files.readAllBytes(path)).setMimeType("MOBI");
                        }
                        if (path.toString().contains(".epub")) {
                            bookEntry.setEpub(Files.readAllBytes(path));
                        }
                        if (path.toString().contains(".jpg")) {
                            // store the cover and generate a 130x200 thumbnail from it
                            bookEntry.setCover(Files.readAllBytes(path));
                            ByteArrayOutputStream output = new ByteArrayOutputStream();
                            Thumbnails.of(new ByteArrayInputStream(bookEntry.getCover())).size(130, 200)
                                    .toOutputStream(output);
                            bookEntry.setThumbnail(output.toByteArray());
                            bookEntry.setThumbnailGenerated("done");
                        }
                    }
                }
                // only directories that yielded at least one book format count as a book
                if (bookEntry.getMobi() != null || bookEntry.getEpub() != null) {
                    bookEntries.add(bookEntry);
                    if (bookEntries.size() > batchSize) {
                        System.out.println("Adding " + bookEntries.size() + " Books...");
                        try {
                            SolrHandler.addBeans(solrServer, bookEntries);
                        } catch (SolrServerException ex) {
                            // batch is dropped on failure; only logged to stdout
                            System.out.println(ex.getMessage());
                            ex.printStackTrace();
                        }
                        bookEntries.clear();
                    }
                }
            } catch (IOException ex) {
                ex.printStackTrace();
            }
            return super.preVisitDirectory(dir, attrs);
        }
    });
}

From source file:au.org.ands.vocabs.toolkit.provider.transform.SolrIndexTransformProvider.java

/**
 * Parses all RDF files in the task's harvest output directory, counts prefLabels via
 * {@code ConceptHandler}, and writes the aggregate counts/text to a
 * {@code concepts_solr.json} result file.
 *
 * @param taskInfo task whose harvest output directory is read
 * @param subtask  subtask description (unused here)
 * @param results  out-parameter map receiving result keys or an exception message
 * @return true on success, false when parsing or result generation failed
 */
@Override
public final boolean transform(final TaskInfo taskInfo, final JsonNode subtask,
        final HashMap<String, String> results) {
    Path dir = Paths.get(ToolkitFileUtils.getTaskHarvestOutputPath(taskInfo));
    ConceptHandler conceptHandler = new ConceptHandler();
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir)) {
        for (Path entry : stream) {
            RDFFormat format = Rio.getParserFormatForFileName(entry.toString());
            RDFParser rdfParser = Rio.createParser(format);
            rdfParser.setRDFHandler(conceptHandler);
            // FIX: the FileInputStream was never closed, leaking one file handle
            // per RDF file parsed; try-with-resources closes it.
            try (FileInputStream is = new FileInputStream(entry.toString())) {
                rdfParser.parse(is, entry.toString());
            }

            logger.debug("Reading RDF:" + entry.toString());

        }
    } catch (DirectoryIteratorException | IOException | RDFParseException | RDFHandlerException ex) {
        // I/O error encountered during the iteration,
        // the cause is an IOException
        results.put(TaskStatus.EXCEPTION, "Exception in SolrIndexTransform while Parsing RDF");
        logger.error("Exception in SolrIndexTransform while Parsing RDF:", ex);
        return false;
    }

    String resultFileName = ToolkitFileUtils.getTaskOutputPath(taskInfo, "concepts_solr.json");
    // FIX: the FileOutputStream was never closed; try-with-resources guarantees the
    // result file is flushed and released. Catch widened to IOException (supertype of
    // FileNotFoundException) to cover close() failures as well.
    try (FileOutputStream out = new FileOutputStream(resultFileName)) {
        JsonObjectBuilder job = Json.createObjectBuilder();
        job.add("concepts_count", conceptHandler.getCountedPrefLabels());
        results.put("concepts_count", Integer.toString(conceptHandler.getCountedPrefLabels()));
        job.add("concepts_text", conceptHandler.getConceptText());
        results.put("concepts_solr", resultFileName);

        JsonWriter jsonWriter = Json.createWriter(out);
        jsonWriter.writeObject(job.build());
        jsonWriter.close();
    } catch (IOException ex) {
        results.put(TaskStatus.EXCEPTION, "Exception in SolrIndexTransform while generating result");
        logger.error("Exception in SolrIndexTransform generating result:", ex);
        return false;
    }
    return true;
}

From source file:net.certiv.antlr.project.util.Utils.java

/**
 * Clears (deletes) all files from the given directory.
 * /* w  w w  . j ava2s.c  o m*/
 * @param dir
 *            the directory to clear
 * @return true if all files were successfully deleted
 * @throws IOException
 */
public static boolean deleteAllFiles(File dir) throws IOException {
    if (dir == null)
        throw new IllegalArgumentException("Directory cannot be null");
    if (!dir.exists() || !dir.isDirectory())
        throw new IOException("Directory must exist");

    DirectoryStream<Path> ds = Files.newDirectoryStream(dir.toPath());
    int del = 0;
    int tot = 0;
    for (Path p : ds) {
        File f = p.toFile();
        String name = f.getName();
        if (f.isFile()) {
            tot++;
            boolean ok = f.delete();
            if (ok) {
                del++;
            } else {
                Log.warn(Utils.class, "Failed to delete: " + name);
            }
        }
    }
    Log.info(Utils.class, "Deleted " + del + " of " + tot + " files");
    return del == tot;
}

From source file:au.org.ands.vocabs.toolkit.provider.transform.JsonListTransformProvider.java

/**
 * Parses all RDF files in the task's harvest output directory, builds a concept map via
 * {@code ConceptHandler}, and writes it as JSON to a {@code concepts_list.json} result file.
 *
 * @param taskInfo task whose harvest output directory is read
 * @param subtask  subtask description (unused here)
 * @param results  out-parameter map receiving result keys or an exception message
 * @return true on success, false when parsing or result generation failed
 */
@Override
public final boolean transform(final TaskInfo taskInfo, final JsonNode subtask,
        final HashMap<String, String> results) {
    Path dir = Paths.get(ToolkitFileUtils.getTaskHarvestOutputPath(taskInfo));
    ConceptHandler conceptHandler = new ConceptHandler();
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir)) {
        for (Path entry : stream) {
            RDFFormat format = Rio.getParserFormatForFileName(entry.toString());
            RDFParser rdfParser = Rio.createParser(format);
            rdfParser.setRDFHandler(conceptHandler);
            // FIX: the FileInputStream was never closed, leaking one file handle
            // per RDF file parsed; try-with-resources closes it.
            try (FileInputStream is = new FileInputStream(entry.toString())) {
                rdfParser.parse(is, entry.toString());
            }
            logger.debug("Reading RDF:" + entry.toString());

        }
    } catch (DirectoryIteratorException | IOException | RDFParseException | RDFHandlerException
            | UnsupportedRDFormatException ex) {
        results.put(TaskStatus.EXCEPTION, "Exception in JsonListTransform while Parsing RDF");
        logger.error("Exception in JsonListTransform while Parsing RDF:", ex);
        return false;
    }

    String resultFileName = ToolkitFileUtils.getTaskOutputPath(taskInfo, "concepts_list.json");
    try {
        File out = new File(resultFileName);
        results.put("concepts_list", resultFileName);
        HashMap<String, HashMap<String, Object>> conceptMap = conceptHandler.getConceptMap();
        FileUtils.writeStringToFile(out, TaskUtils.mapToJSONString(conceptMap));
    } catch (IOException ex) {
        results.put(TaskStatus.EXCEPTION, "Exception in JsonListTransform while Parsing RDF");
        logger.error("Exception in JsonListTransform generating result:", ex);
        return false;
    }
    return true;
}

From source file:ch.mattrero.foldersync.FoldersSynchronizer.java

/**
 * Recursively synchronizes one directory level of the source tree into the backup tree.
 * Both directory streams are iterated in parallel like a merge: because both sides are
 * compared via relativized paths, the items are matched by relative name, and the side
 * that is "behind" determines whether an item was added, deleted, or must be compared
 * for modification.
 *
 * NOTE(review): this relies on both DirectoryStreams yielding entries in the same sorted
 * order — confirm this holds on the target file systems, since DirectoryStream ordering
 * is not guaranteed in general.
 *
 * @param sourceSubDir the source subdirectory to sync (resolved against the backup root)
 */
void syncTree(final Path sourceSubDir) {

    SyncStatus status = null;
    BasicFileAttributes fromAttributes = null;
    BasicFileAttributes toAttributes = null;

    try (final DirectoryStream<Path> sourceStream = Files.newDirectoryStream(sourceSubDir);
            DirectoryStream<Path> backupStream = Files
                    .newDirectoryStream(resolveBackupItemPath(sourceSubDir))) {

        final Iterator<Path> sourceIterator = sourceStream.iterator();
        final Iterator<Path> backupIterator = backupStream.iterator();

        Path sourceItem = (sourceIterator.hasNext() ? sourceIterator.next() : null);
        Path backupItem = (backupIterator.hasNext() ? backupIterator.next() : null);

        while (sourceItem != null || backupItem != null) {
            // classify the current pair of items
            if (sourceItem == null) {
                // source exhausted: everything left in backup was deleted from source
                status = DELETED;
            } else if (backupItem == null) {
                // backup exhausted: everything left in source is new
                status = ADDED;
            } else if (sourceDir.relativize(sourceItem).compareTo(backupDir.relativize(backupItem)) < 0) {
                status = ADDED;
            } else if (sourceDir.relativize(sourceItem).compareTo(backupDir.relativize(backupItem)) > 0) {
                status = DELETED;
            } else if (Files.isDirectory(sourceItem) != Files.isDirectory(backupItem)) {
                // same name but file vs. directory: treat as modified
                status = MODIFIED;
            } else if (Files.isDirectory(sourceItem)) {
                // matching directories: contents handled by recursion below
                status = SYNCHRONIZED;
            } else {
                // matching regular files: compare timestamp and size
                fromAttributes = Files.readAttributes(sourceItem, BasicFileAttributes.class);
                toAttributes = Files.readAttributes(backupItem, BasicFileAttributes.class);

                // any timestamp difference at all counts as modified
                if (Math.abs(fromAttributes.lastModifiedTime().toMillis()
                        - toAttributes.lastModifiedTime().toMillis()) > 0) {
                    status = MODIFIED;
                } else if (fromAttributes.size() != toAttributes.size()) {
                    status = MODIFIED;
                } else {
                    status = SYNCHRONIZED;
                }
            }

            // act on the classification and advance the iterator(s)
            switch (status) {
            case ADDED:
                syncAdded(sourceItem);
                sourceItem = (sourceIterator.hasNext() ? sourceIterator.next() : null);
                break;
            case DELETED:
                syncDeleted(sourceDir.resolve(backupDir.relativize(backupItem)));
                backupItem = (backupIterator.hasNext() ? backupIterator.next() : null);
                break;
            case MODIFIED:
                syncModified(sourceItem);
                // NOTE(review): no break here — fall-through into SYNCHRONIZED looks
                // deliberate (after syncing, recurse and advance both sides); confirm.
            case SYNCHRONIZED:
            default:
                if (Files.isDirectory(sourceItem)) {
                    syncTree(sourceItem);
                }
                sourceItem = (sourceIterator.hasNext() ? sourceIterator.next() : null);
                backupItem = (backupIterator.hasNext() ? backupIterator.next() : null);
                break;
            }
        }

    } catch (final IOException | SecurityException e) {
        logger.debug("Failed to sync tree " + sourceSubDir, e);
    }
}

From source file:org.xenmaster.web.TemplateHook.java

/**
 * Serves a directory of template files as one JSON object: each regular,
 * non-hidden file in the requested directory becomes a property whose key is
 * the file's base name and whose value is the file's content.
 *
 * @param rb the request; its path parts select the directory under the web content root
 * @throws IOException on file read errors
 */
@Override
public void handle(RequestBundle rb) throws IOException {
    if (rb.getPathParts().length < 1) {
        return;
    }
    String path = "";
    try {
        // normalize the joined path to neutralize "." / ".." segments
        String concat = StringUtils.join(rb.getPathParts(), '/');
        URI uri = new URI(concat);
        uri = uri.normalize();
        path = uri.getPath();
    } catch (URISyntaxException ex) {
        Logger.getLogger(getClass()).error(ex);
    }
    if (path.isEmpty()) {
        return;
    }

    path = Settings.getInstance().getString("WebContentPath") + "/" + this.getSelector() + "/" + path;
    File f = new File(path);
    if (f.exists() && f.isDirectory()) {
        Path p = f.toPath();
        JsonObject contentTree = new JsonObject();
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(p)) {
            for (Path file : stream) {
                // FIX: was file.startsWith(".") — Path.startsWith compares path
                // components, so it never matched and hidden files were NOT skipped.
                // Check the file *name* instead.
                if (file.toFile().isFile() && !file.getFileName().toString().startsWith(".")) {
                    // FIX: the FileInputStream was never closed; try-with-resources
                    // releases the handle after reading.
                    try (FileInputStream in = new FileInputStream(file.toFile())) {
                        contentTree.addProperty(FilenameUtils.getBaseName(file.toString()),
                                IOUtils.toString(in));
                    }
                }
            }
        }
        Gson gson = new Gson();
        rb.replyWithString(gson.toJson(contentTree));
    }
}

From source file:com.arcanix.php.phar.DirectoryPharEntryProvider.java

/**
 * Recursively descends into the given directory and appends a {@code PharEntry} for
 * every regular file found, with its archive path built from the file's location
 * relative to the phar root.
 *
 * @param pharEntries accumulator list the entries are appended to
 * @param directory   the directory to scan
 * @throws IOException if the directory cannot be opened or iterated
 */
private void addPharEntriesRecursively(final List<PharEntry> pharEntries, final Path directory)
        throws IOException {
    try (DirectoryStream<Path> entries = Files.newDirectoryStream(directory)) {
        for (Path entry : entries) {
            File asFile = entry.toFile();
            if (!asFile.isDirectory()) {
                // archive location = localPath + "/" + path relative to the phar root
                String relativePath = this.rootPath.relativize(entry).toString();
                String entryPath = this.localPath + "/" + relativePath;
                pharEntries.add(new PharEntry(asFile, entryPath, this.pharCompression));
            } else {
                addPharEntriesRecursively(pharEntries, entry);
            }
        }
    }
}