Example usage for java.nio.file Files walkFileTree

List of usage examples for java.nio.file Files walkFileTree

Introduction

On this page you can find example usage for java.nio.file Files.walkFileTree.

Prototype

public static Path walkFileTree(Path start, FileVisitor<? super Path> visitor) throws IOException 

Document

Walks a file tree rooted at the given starting file; the traversal is depth-first, and the given FileVisitor is invoked for each file and directory encountered.
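
For orientation, here is a minimal, self-contained sketch of the two-argument overload (illustrative code, not taken from the projects listed below; the start path is arbitrary). It prints every regular file beneath the start directory:

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;

public class WalkFileTreeExample {
    public static void main(String[] args) throws IOException {
        Path start = Paths.get(".");

        // Walk the tree rooted at 'start'; the visitor is invoked for each file encountered.
        Files.walkFileTree(start, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                System.out.println(file);
                return FileVisitResult.CONTINUE;
            }
        });
    }
}

The method returns the start path. A four-argument overload, walkFileTree(Path, Set<FileVisitOption>, int, FileVisitor), additionally accepts visit options (such as FOLLOW_LINKS) and a maximum depth; the two-argument form shown here does not follow symbolic links and visits all levels.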

Usage

From source file:com.scooter1556.sms.server.service.ScannerService.java

private void scanMediaFolder(MediaFolder folder) {
    Path path = FileSystems.getDefault().getPath(folder.getPath());
    ParseFiles fileParser = new ParseFiles(folder);

    try {
        // Start scanning the directory
        LogService.getInstance().addLogEntry(LogService.Level.INFO, CLASS_NAME,
                "Scanning media folder " + folder.getPath(), null);
        Files.walkFileTree(path, fileParser);

        // Add new media elements in database
        if (!fileParser.getNewMediaElements().isEmpty()) {
            mediaDao.createMediaElements(fileParser.getNewMediaElements());
        }

        // Update existing media elements in database
        if (!fileParser.getUpdatedMediaElements().isEmpty()) {
            mediaDao.updateMediaElementsByID(fileParser.getUpdatedMediaElements());
        }

        // Extract media streams from parsed media elements
        List<VideoStream> vStreams = new ArrayList<>();
        List<AudioStream> aStreams = new ArrayList<>();
        List<SubtitleStream> sStreams = new ArrayList<>();

        for (MediaElement element : fileParser.getAllMediaElements()) {
            if (element.getVideoStreams() != null) {
                vStreams.addAll(element.getVideoStreams());
            }

            if (element.getAudioStreams() != null) {
                aStreams.addAll(element.getAudioStreams());
            }

            if (element.getSubtitleStreams() != null) {
                sStreams.addAll(element.getSubtitleStreams());
            }
        }

        // Add media streams to database
        mediaDao.createVideoStreams(vStreams);
        mediaDao.createAudioStreams(aStreams);
        mediaDao.createSubtitleStreams(sStreams);

        // Add new playlists
        if (!fileParser.getNewPlaylists().isEmpty()) {
            for (Playlist playlist : fileParser.getNewPlaylists()) {
                mediaDao.createPlaylist(playlist);
            }
        }

        // Update existing playlists
        if (!fileParser.getUpdatedPlaylists().isEmpty()) {
            for (Playlist playlist : fileParser.getUpdatedPlaylists()) {
                mediaDao.updatePlaylistLastScanned(playlist.getID(), fileParser.getScanTime());
            }
        }

        // Remove files which no longer exist
        mediaDao.removeDeletedMediaElements(folder.getPath(), fileParser.getScanTime());
        mediaDao.removeDeletedPlaylists(folder.getPath(), fileParser.getScanTime());

        // Update folder statistics
        folder.setFolders(fileParser.getFolders());
        folder.setFiles(fileParser.getFiles());
        folder.setLastScanned(fileParser.getScanTime());

        // Determine primary media type in folder
        if (folder.getType() == null || folder.getType() == MediaFolder.ContentType.UNKNOWN) {
            int audio = 0, video = 0, playlist;

            // Get number of playlists
            playlist = fileParser.getAllPlaylists().size();

            // Iterate over media elements to determine number of each type
            for (MediaElement element : fileParser.getAllMediaElements()) {
                switch (element.getType()) {
                case MediaElementType.AUDIO:
                    audio++;
                    break;

                case MediaElementType.VIDEO:
                    video++;
                    break;
                }
            }

            if (audio == 0 && video == 0 && playlist > 0) {
                folder.setType(MediaFolder.ContentType.PLAYLIST);
            } else if (audio > video) {
                folder.setType(MediaFolder.ContentType.AUDIO);
            } else if (video > audio) {
                folder.setType(MediaFolder.ContentType.VIDEO);
            }
        }

        settingsDao.updateMediaFolder(folder);

        LogService.getInstance().addLogEntry(LogService.Level.INFO, CLASS_NAME,
                "Finished scanning media folder " + folder.getPath() + " (Items Scanned: "
                        + fileParser.getTotal() + ", Folders: " + fileParser.getFolders() + ", Files: "
                        + fileParser.getFiles() + ", Playlists: " + fileParser.getPlaylists() + ")",
                null);
    } catch (IOException ex) {
        LogService.getInstance().addLogEntry(LogService.Level.ERROR, CLASS_NAME,
                "Error scanning media folder " + folder.getPath(), ex);
    }
}

From source file:com.datafibers.kafka.connect.FileGenericSourceTask.java

/**
 * Looks for files that match the glob criteria. Any matches found are added to the list of
 * files to be processed.
 */
private void findMatch() {
    final PathMatcher globMatcher = FileSystems.getDefault().getPathMatcher("glob:".concat(glob));

    try {
        Files.walkFileTree(Paths.get(location), new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path path, BasicFileAttributes attributes) throws IOException {
                if (globMatcher.matches(path)) {
                    if (!processedPaths.contains(path)) {
                        inProgressPaths.add(path);
                    }
                }
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult visitFileFailed(Path file, IOException e) throws IOException {
                return FileVisitResult.CONTINUE;
            }
        });
    } catch (IOException e) {
        e.printStackTrace();
    }
}
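
Note that the matcher above is applied to the full Path handed to visitFile, and in glob syntax the * wildcard does not cross directory boundaries, so a pattern such as *.csv typically needs to be written as **/*.csv, or matched against the file name only, to catch files in subdirectories. A rough sketch of the file-name variant (the class and method names are invented for illustration, not part of the original connector):

import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.List;

public class GlobByFileName {

    /** Collects files whose name (not full path) matches the given glob. */
    public static List<Path> find(Path root, String glob) throws IOException {
        final PathMatcher matcher = FileSystems.getDefault().getPathMatcher("glob:" + glob);
        final List<Path> matches = new ArrayList<>();

        Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
                // Compare against the last path component so "*.csv" matches at any depth.
                if (matcher.matches(file.getFileName())) {
                    matches.add(file);
                }
                return FileVisitResult.CONTINUE;
            }
        });
        return matches;
    }
}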

From source file:dk.dma.ais.downloader.QueryService.java

/**
 * Deletes all the files in the client folder
 */
@RequestMapping(value = "/delete-all/{clientId}", method = RequestMethod.GET)
@ResponseBody
public String deleteFiles(@PathVariable("clientId") String clientId, HttpServletResponse response)
        throws IOException {

    int deletedFiles = 0;
    Path path = repoRoot.resolve(clientId);

    if (Files.notExists(path) || !Files.isDirectory(path)) {
        log.log(Level.WARNING, "Failed deleting files in " + path);
        response.setStatus(404);
        return "Failed deleting files in " + clientId;
    }

    try {
        Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                log.info("Deleting repo file      :" + file);
                Files.delete(file);
                return FileVisitResult.CONTINUE;
            }
        });
    } catch (IOException e) {
        log.log(Level.SEVERE, "Failed cleaning up dir: " + path);
        return "Failed deleting files in " + clientId;
    }
    return "Deleted files in dir " + clientId;
}
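
Note that the visitor above only overrides visitFile, so files are removed but any subdirectories under the client folder are left in place (now empty). If directories should be cleaned up as well, an extra override along the following lines could be added to the same SimpleFileVisitor (a sketch, not part of the original service; it reuses the path variable from the method above):

            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                // Delete the now-empty subdirectory, but keep the client folder itself.
                if (exc == null && !dir.equals(path)) {
                    Files.delete(dir);
                }
                return FileVisitResult.CONTINUE;
            }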

From source file:com.github.fritaly.dualcommander.Utils.java

public static Scan scan(Collection<File> collection) {
    Validate.notNull(collection, "The given collection of files is null");

    final Scan scan = new Scan();

    for (File element : collection) {
        if (element.isFile()) {
            scan.visitFile(element);
        } else {
            try {
                Files.walkFileTree(element.toPath(), scan);
            } catch (IOException e) {
                throw new RuntimeException("Error when walking directory '" + element + "'", e);
            }
        }
    }

    return scan;
}

From source file:org.vpac.ndg.storage.util.TimeSliceUtil.java

public void cleanup(String timesliceId) {
    TimeSlice ts = timeSliceDao.retrieve(timesliceId);
    Path tsPath = getFileLocation(ts);
    try {
        Files.walkFileTree(tsPath, new SimpleFileVisitor<Path>() {
            /**
             * Delete backup tiles left over from a previous import
             */
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {

                String fileName = file.getFileName().toString();

                if (!fileName.endsWith("_old" + GdalFormat.NC.getExtension())) {
                    // IGNORE NON-BACKUP TILE
                    return FileVisitResult.CONTINUE;
                }

                Path backupTilePath = file.toAbsolutePath();
                try {
                    FileUtils.deleteIfExists(backupTilePath);
                    log.debug("CLEAN UP {}", backupTilePath);
                } catch (Exception e) {
                    log.error("{}", e);
                }

                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException e) throws IOException {
                return FileVisitResult.CONTINUE;
            }
        });
    } catch (IOException e) {
        log.error("Error restoring {} caused by {}", tsPath, e);
    }
}

From source file:org.apache.jena.atlas.io.IO.java

/** Delete everything from a {@code Path} start point, including the path itself.
 * This function works on files or directories.
 * This function does not follow symbolic links.
 */
public static void deleteAll(Path start) {
    // Walks down the tree and deletes directories on the way back up.
    try {
        Files.walkFileTree(start, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                Files.delete(file);
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException e) throws IOException {
                if (e == null) {
                    Files.delete(dir);
                    return FileVisitResult.CONTINUE;
                } else {
                    throw e;
                }
            }
        });
    } catch (IOException ex) {
        IO.exception(ex);
        return;
    }
}

From source file:org.eclipse.winery.repository.importing.CSARImporter.java

/**
 * Import an extracted CSAR from a directory
 *
 * @param path the root path of an extracted CSAR file
 * @param overwrite if true: contents of the repo are overwritten
 * @param asyncWPDParsing true if WPD should be parsed asynchronously to speed up the import.
 *        Required, because JUnit terminates the used ExecutorService
 * @throws InvalidCSARException
 * @throws IOException
 */
void importFromDir(final Path path, final List<String> errors, final boolean overwrite,
        final boolean asyncWPDParsing) throws IOException {
    Path toscaMetaPath = path.resolve("xml/TOSCA-Metadata/TOSCA.meta");
    if (!Files.exists(toscaMetaPath)) {
        toscaMetaPath = path.resolve("TOSCA-Metadata/TOSCA.meta");
    }

    if (!Files.exists(toscaMetaPath)) {
        errors.add("TOSCA.meta does not exist");
        return;
    }
    final TOSCAMetaFileParser tmfp = new TOSCAMetaFileParser();
    final TOSCAMetaFile tmf = tmfp.parse(toscaMetaPath);

    // we do NOT do any sanity checks of TOSCA.meta
    // and just start parsing

    if (tmf.getEntryDefinitions() != null) {
        // we obey the entry definitions and "just" import that
        // imported definitions are added recursively
        Path defsPath = path.resolve(tmf.getEntryDefinitions());
        this.importDefinitions(tmf, defsPath, errors, overwrite, asyncWPDParsing);

        this.importSelfServiceMetaData(tmf, path, defsPath, errors);
    } else {
        // no explicit entry definitions found
        // we import all available definitions
        // The specification says (cos01, Section 16.1, line 2935) that all definitions are contained
        // in the "Definitions" directory
        // The alternative is to go through all entries in the TOSCA Meta File, but there is no
        // guarantee that this list is complete
        Path definitionsDir = path.resolve("Definitions");
        if (!Files.exists(definitionsDir)) {
            errors.add("No entry definitions defined and Definitions directory does not exist.");
            return;
        }
        final List<IOException> exceptions = new ArrayList<IOException>();
        Files.walkFileTree(definitionsDir, new SimpleFileVisitor<Path>() {

            @Override
            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) {
                if (dir.endsWith("Definitions")) {
                    return FileVisitResult.CONTINUE;
                } else {
                    return FileVisitResult.SKIP_SUBTREE;
                }
            }

            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
                try {
                    CSARImporter.this.importDefinitions(tmf, file, errors, overwrite, asyncWPDParsing);
                } catch (IOException e) {
                    exceptions.add(e);
                    return FileVisitResult.TERMINATE;
                }
                return FileVisitResult.CONTINUE;
            }
        });

        if (!exceptions.isEmpty()) {
            // something went wrong during parsing
            // we rethrow the exception
            throw exceptions.get(0);
        }
    }

    this.importNamespacePrefixes(path);
}

From source file:com.netflix.nicobar.core.module.ScriptModuleLoader.java

/**
 * Add or update the existing {@link ScriptModule}s with the given script archives.
 * This method will convert the archives to modules and then compile + link them in to the
 * dependency graph. It will then recursively re-link any modules depending on the new modules.
 * If this loader already contains an old version of the module, it will be unloaded on
 * successful compile of the new module.
 *
 * @param candidateArchives archives to load or update
 */
public synchronized void updateScriptArchives(Set<? extends ScriptArchive> candidateArchives) {
    Objects.requireNonNull(candidateArchives);
    long updateNumber = System.currentTimeMillis();

    // map script module id to archive to be compiled
    Map<ModuleId, ScriptArchive> archivesToCompile = new HashMap<ModuleId, ScriptArchive>(
            candidateArchives.size() * 2);

    // create an updated mapping of the scriptModuleId to latest revisionId including the yet-to-be-compiled archives
    Map<ModuleId, ModuleIdentifier> oldRevisionIdMap = jbossModuleLoader.getLatestRevisionIds();
    Map<ModuleId, ModuleIdentifier> updatedRevisionIdMap = new HashMap<ModuleId, ModuleIdentifier>(
            (oldRevisionIdMap.size() + candidateArchives.size()) * 2);
    updatedRevisionIdMap.putAll(oldRevisionIdMap);

    // Map of the scriptModuleId to its updated set of dependencies
    Map<ModuleId, Set<ModuleId>> archiveDependencies = new HashMap<ModuleId, Set<ModuleId>>();
    for (ScriptArchive scriptArchive : candidateArchives) {
        ModuleId scriptModuleId = scriptArchive.getModuleSpec().getModuleId();

        // filter out archives that have a newer module already loaded
        long createTime = scriptArchive.getCreateTime();
        ScriptModule scriptModule = loadedScriptModules.get(scriptModuleId);
        long latestCreateTime = scriptModule != null ? scriptModule.getCreateTime() : 0;
        if (createTime < latestCreateTime) {
            notifyArchiveRejected(scriptArchive, ArchiveRejectedReason.HIGHER_REVISION_AVAILABLE, null);
            continue;
        }

        // create the new revisionIds that should be used for the linkages when the new modules
        // are defined.
        ModuleIdentifier newRevisionId = JBossModuleUtils.createRevisionId(scriptModuleId, updateNumber);
        updatedRevisionIdMap.put(scriptModuleId, newRevisionId);

        archivesToCompile.put(scriptModuleId, scriptArchive);

        // create a dependency map of the incoming archives so that we can later build a candidate graph
        archiveDependencies.put(scriptModuleId, scriptArchive.getModuleSpec().getModuleDependencies());
    }

    // create a dependency graph with the candidates swapped in, in order to figure out the
    // order in which the candidates should be loaded
    DirectedGraph<ModuleId, DefaultEdge> candidateGraph = jbossModuleLoader.getModuleNameGraph();
    GraphUtils.swapVertices(candidateGraph, archiveDependencies);

    // iterate over the graph in reverse dependency order
    Set<ModuleId> leaves = GraphUtils.getLeafVertices(candidateGraph);
    while (!leaves.isEmpty()) {
        for (ModuleId scriptModuleId : leaves) {
            ScriptArchive scriptArchive = archivesToCompile.get(scriptModuleId);
            if (scriptArchive == null) {
                continue;
            }
            ModuleSpec moduleSpec;
            ModuleIdentifier candidateRevisionId = updatedRevisionIdMap.get(scriptModuleId);
            Path modulePath = createModulePath(candidateRevisionId);
            final Path moduleCompilationRoot = compilationRootDir.resolve(modulePath);
            FileUtils.deleteQuietly(moduleCompilationRoot.toFile());
            try {
                Files.createDirectories(moduleCompilationRoot);
            } catch (IOException ioe) {
                notifyArchiveRejected(scriptArchive, ArchiveRejectedReason.ARCHIVE_IO_EXCEPTION, ioe);
            }

            try {
                moduleSpec = createModuleSpec(scriptArchive, candidateRevisionId, updatedRevisionIdMap,
                        moduleCompilationRoot);
            } catch (ModuleLoadException e) {
                logger.error("Exception loading archive " + scriptArchive.getModuleSpec().getModuleId(), e);
                notifyArchiveRejected(scriptArchive, ArchiveRejectedReason.ARCHIVE_IO_EXCEPTION, e);
                continue;
            }

            // load and compile the module
            jbossModuleLoader.addModuleSpec(moduleSpec);
            Module jbossModule = null;
            try {
                jbossModule = jbossModuleLoader.loadModule(candidateRevisionId);
                compileModule(jbossModule, moduleCompilationRoot);

                // Now refresh the resource loaders for this module, load the set of
                // compiled classes, and populate them into the module's local class cache.
                jbossModuleLoader.rescanModule(jbossModule);

                final Set<String> classesToLoad = new LinkedHashSet<String>();
                Files.walkFileTree(moduleCompilationRoot, new SimpleFileVisitor<Path>() {
                    @Override
                    public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                        String relativePath = moduleCompilationRoot.relativize(file).toString();
                        if (relativePath.endsWith(".class")) {
                            String className = relativePath.replaceAll("\\.class$", "").replace("\\", ".")
                                    .replace("/", ".");
                            classesToLoad.add(className);
                        }
                        return FileVisitResult.CONTINUE;
                    }
                });
                for (String loadClass : classesToLoad) {
                    Class<?> loadedClass = jbossModule.getClassLoader().loadClassLocal(loadClass, true);
                    if (loadedClass == null)
                        throw new ScriptCompilationException("Unable to load compiled class: " + loadClass);
                }
            } catch (Exception e) {
                // rollback
                logger.error("Exception loading module " + candidateRevisionId, e);
                if (candidateArchives.contains(scriptArchive)) {
                    // this spec came from a candidate archive. Send reject notification
                    notifyArchiveRejected(scriptArchive, ArchiveRejectedReason.COMPILE_FAILURE, e);
                }
                if (jbossModule != null) {
                    jbossModuleLoader.unloadModule(jbossModule);
                }
                continue;
            }

            // commit the change by removing the old module
            ModuleIdentifier oldRevisionId = oldRevisionIdMap.get(scriptModuleId);
            if (oldRevisionId != null) {
                jbossModuleLoader.unloadModule(oldRevisionId);
            }

            JBossScriptModule scriptModule = new JBossScriptModule(scriptModuleId, jbossModule, scriptArchive);
            ScriptModule oldModule = loadedScriptModules.put(scriptModuleId, scriptModule);
            notifyModuleUpdate(scriptModule, oldModule);

            // find dependents and add them to the to be compiled set
            Set<ModuleId> dependents = GraphUtils.getIncomingVertices(candidateGraph, scriptModuleId);
            for (ModuleId dependentScriptModuleId : dependents) {
                if (!archivesToCompile.containsKey(dependentScriptModuleId)) {
                    ScriptModule dependentScriptModule = loadedScriptModules.get(dependentScriptModuleId);
                    if (dependentScriptModule != null) {
                        archivesToCompile.put(dependentScriptModuleId,
                                dependentScriptModule.getSourceArchive());
                        ModuleIdentifier dependentRevisionId = JBossModuleUtils
                                .createRevisionId(dependentScriptModuleId, updateNumber);
                        updatedRevisionIdMap.put(dependentScriptModuleId, dependentRevisionId);
                    }
                }
            }
        }

        GraphUtils.removeVertices(candidateGraph, leaves);
        leaves = GraphUtils.getLeafVertices(candidateGraph);
    }
}

From source file:de.teamgrit.grit.checking.compile.JavaCompileChecker.java

/**
 * Provides a DirectoryWalker to find submissions matching the '.java' file
 * extension.
 *
 * @param pathToSourceFolder
 *            the folder in which the DirectoryWalker will search for files
 * @return a list of found files
 */
private List<Path> exploreDirectory(Path pathToSourceFolder) {
    RegexDirectoryWalker dirWalker = new RegexDirectoryWalker(".+\\.[Jj][Aa][Vv][Aa]");
    try {
        Files.walkFileTree(pathToSourceFolder, dirWalker);
    } catch (IOException e) {
        LOGGER.severe("Could not walk submission " + pathToSourceFolder.toString()
                + " while building compiler invocation: " + e.getMessage());
    }
    return dirWalker.getFoundFiles();
}
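
RegexDirectoryWalker is a class specific to this project. For readers without it, a roughly equivalent visitor using only the standard API could look like the sketch below (the class and method names are invented for illustration):

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

public class RegexFileCollector {

    /** Returns every file under root whose path matches the given regular expression. */
    public static List<Path> collect(Path root, String regex) throws IOException {
        final Pattern pattern = Pattern.compile(regex);
        final List<Path> found = new ArrayList<>();

        Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
                if (pattern.matcher(file.toString()).matches()) {
                    found.add(file);
                }
                return FileVisitResult.CONTINUE;
            }
        });
        return found;
    }
}

Invoked with the same pattern as above, for example collect(pathToSourceFolder, ".+\\.[Jj][Aa][Vv][Aa]"), it would gather the submitted .java files.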

From source file:org.hawkular.inventory.impl.tinkerpop.test.BasicTest.java

private static void deleteGraph() throws Exception {
    Path path = Paths.get("./", "__tinker.graph");

    if (!path.toFile().exists()) {
        return;
    }

    Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            Files.delete(file);
            return FileVisitResult.CONTINUE;
        }

        @Override
        public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
            Files.delete(dir);
            return FileVisitResult.CONTINUE;
        }
    });
}