Example usage for java.nio.file Path relativize

List of usage examples for java.nio.file Path relativize

Introduction

This page shows example usage of java.nio.file Path.relativize.

Prototype

Path relativize(Path other);

Document

Constructs a relative path between this path and a given path.
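
Before the project examples below, here is a minimal, self-contained sketch (using hypothetical paths) of what relativize returns and how it pairs with resolve:

import java.nio.file.Path;
import java.nio.file.Paths;

public class RelativizeExample {
    public static void main(String[] args) {
        // Hypothetical base directory and a file beneath it.
        Path base = Paths.get("/home/user/project");
        Path file = Paths.get("/home/user/project/src/Main.java");

        // relativize() constructs the path of 'file' as seen from 'base'.
        Path relative = base.relativize(file);
        System.out.println(relative); // src/Main.java (on Unix-style file systems)

        // relativize() is the inverse of resolve(): resolving the relative
        // path against the base points back at the original file.
        System.out.println(base.resolve(relative).normalize());
    }
}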

Usage

From source file:org.apache.openaz.xacml.admin.components.PolicyWorkspace.java

protected void pushChanges(final File target) {
    try {
        //
        // Grab our working repository
        //
        Path repoPath = ((XacmlAdminUI) getUI()).getUserGitPath();
        final Git git = Git.open(repoPath.toFile());
        //
        // Get our status
        //
        final String base;
        Status status;
        if (target == null) {
            base = ".";
        } else {
            Path relativePath = repoPath.relativize(Paths.get(target.getPath()));
            base = relativePath.toString();
        }
        if (logger.isDebugEnabled()) {
            logger.debug("Status on base: " + base);
        }
        status = git.status().addPath(base).call();
        //
        // Check if its clean
        //
        if (status.isClean()) {
            //
            // Its clean
            //
            AdminNotification.warn(target.getName() + " is clean!");
            return;
        }
        //
        // Create the window
        //
        final GitPushWindow window = new GitPushWindow(git, target, status);
        window.setCaption("Push Changes");
        window.setModal(true);
        window.addCloseListener(new CloseListener() {
            private static final long serialVersionUID = 1L;

            @Override
            public void windowClose(CloseEvent e) {
                if (window.isSaved() == false) {
                    return;
                }
                try {
                    //
                    // Needs to be added first
                    //
                    DirCache cache = git.add().addFilepattern(base).call();
                    for (int i = 0; i < cache.getEntryCount(); i++) {
                        DirCacheEntry entry = cache.getEntry(i);
                        if (logger.isDebugEnabled()) {
                            logger.debug("Entry: " + entry);
                        }
                    }
                    //
                    // Next they need to be committed
                    //
                    RevCommit rev = git.commit().setMessage(window.getComment()).call();
                    if (logger.isDebugEnabled()) {
                        logger.debug("RevCommit: " + rev);
                    }
                    //
                    // Now we can push changes to the Git repository
                    //
                    Iterable<PushResult> results = git.push().call();
                    for (PushResult result : results) {
                        logger.info(result);
                    }
                    //
                    // Have the container fire an item set change notification
                    //
                    self.treeContainer.updateItem(target);
                } catch (NoWorkTreeException | GitAPIException e1) {
                    logger.error(e1);
                    AdminNotification.error("Exception occurred while trying to push: " + e1);
                }
            }

        });
        window.center();
        UI.getCurrent().addWindow(window);
    } catch (IOException | GitAPIException e) {
        logger.error(e);
        AdminNotification.error("Exception occurred while trying to get status: " + e);
    }
}

From source file:gda.data.scan.datawriter.NexusDataWriter.java

void writeHere(NeXusFileInterface file, INexusTree tree, boolean makeData, boolean attrOnly,
        List<SelfCreatingLink> links) throws NexusException {
    if (!tree.isPointDependent() && !makeData) {
        return;
    }
    String name = tree.getName();
    String nxClass = tree.getNxClass();
    Boolean dataOpen = false;
    Boolean loopNodes = true;
    Boolean attrBelowThisOnly = attrOnly;
    Boolean nxClassIsSDS = nxClass.equals(NexusExtractor.SDSClassName);
    Boolean nxClassIsAttr = nxClass.equals(NexusExtractor.AttrClassName);
    Boolean nxClassIsExternalSDS = nxClass.equals(NexusExtractor.ExternalSDSLink);
    if (nxClassIsExternalSDS) {
        if (makeData) {
            NexusGroupData data = tree.getData();
            try {
                /**
                 * Create a link of the format
                 * "nxfile://" + path to external file relative to nxs file + "#" + address 
                 * 
                 * The buffer in data contains 
                 * "nxfile://" + abs path to external file + "#" + address
                 * 
                 * so we need to replace the abs path with the relative path
                 */
                String link = new String((byte[]) data.getBuffer(), "UTF-8");
                //link is of format nxfile:// + filepath + # + address
                String[] linkParts = link.split("nxfile://");
                if (linkParts.length != 2) {
                    throw new NexusException("Invalid format for external link " + StringUtils.quote(link));
                }
                String[] parts = linkParts[1].split("#");
                if (parts.length != 2) {
                    throw new NexusException("Invalid format for external link " + StringUtils.quote(link));
                }
                Path absExtPath = Paths.get(parts[0]);
                String address = parts[1];
                File f = absExtPath.toFile();
                if (!f.exists())
                    logger.warn("file " + absExtPath + " does not exist at time of adding link");
                Path nxsFile = Paths.get(nexusFileUrl);
                Path nxsParent = nxsFile.getParent();
                Path relativize = nxsParent.relativize(absExtPath);
                String relativeLink = "nxfile://" + relativize + "#" + address;
                file.linkexternaldataset(name, relativeLink);
                links.add(new ExternalNXlink(name, relativeLink));
            } catch (UnsupportedEncodingException e) {
                throw new NexusException(
                        "supported encoding in creating string for external linking -- this should never happen");
            }
        }
        return;
    }
    if (nxClassIsAttr) {
        if (makeData) {
            NexusGroupData data = tree.getData();
            if (data != null && data.getBuffer() != null) {
                if ("axis".equals(name) || "label".equals(name)) {
                    Integer axisno = getIntfromBuffer(data.getBuffer());
                    axisno += thisPoint.getScanDimensions().length;
                    file.putattr(name, axisno.toString().getBytes(), NexusFile.NX_CHAR);
                } else {
                    file.putattr(name, data.getBuffer(), data.type);
                }
            }
        }
        return;
    }
    if (attrOnly) {
        return;
    }
    if (!name.isEmpty() && !nxClass.isEmpty()) {
        if (!nxClassIsSDS) {
            if (!(file.groupdir().containsKey(name) && file.groupdir().get(name).equals(nxClass))) {
                file.makegroup(name, nxClass);
            }
            file.opengroup(name, nxClass);
        }

        NexusGroupData sds = tree.getData();
        if (sds != null) {
            if (sds.dimensions != null) {
                for (int i : sds.dimensions) {
                    if (i == 0)
                        throw new NexusException("Data for " + name + " is invalid. SDS Dimension = 0");
                }
            }
            if (makeData) {
                int[] dataDimMake = generateDataDim(tree.isPointDependent(),
                        tree.isPointDependent() ? scanDimensions : null, sds.dimensions);

                if (sds.dimensions != null && sds.dimensions.length > 1) {
                    int[] chunks = Arrays.copyOf(dataDimMake, dataDimMake.length);
                    for (int i = 0; i < chunks.length; i++) {
                        if (chunks[i] == -1)
                            chunks[i] = 1;
                    }
                    if (sds.chunkDimensions != null && sds.chunkDimensions.length <= chunks.length) {
                        int lendiff = chunks.length - sds.chunkDimensions.length;
                        for (int i = 0; i < sds.chunkDimensions.length; i++) {
                            chunks[i + lendiff] = dataDimMake[i + lendiff] == -1 ? sds.chunkDimensions[i]
                                    : Math.min(sds.chunkDimensions[i], chunks[i + lendiff]);
                        }
                    }
                    int compression = sds.compressionType != null ? sds.compressionType
                            : NexusFile.NX_COMP_LZW_LVL1;
                    file.compmakedata(name, sds.type, dataDimMake.length, dataDimMake, compression, chunks);
                } else {
                    file.makedata(name, sds.type, dataDimMake.length, dataDimMake);
                }

                file.opendata(name);
                if (!tree.isPointDependent()) {
                    int[] dataDim = generateDataDim(false, null, sds.dimensions);
                    int[] dataStartPos = generateDataStartPos(null, sds.dimensions);
                    file.putslab(sds.getBuffer(), dataStartPos, dataDim);
                }
                if (links != null && sds.isDetectorEntryData) {
                    links.add(new SelfCreatingLink(file.getdataID()));
                }

                dataOpen = true;
                attrBelowThisOnly = true;
            } else {
                int[] dataDim = generateDataDim(false, dataDimPrefix, sds.dimensions);
                int[] dataStartPos = generateDataStartPos(dataStartPosPrefix, sds.dimensions);

                // Open data array.
                file.opendata(name);

                file.putslab(sds.getBuffer(), dataStartPos, dataDim);
                dataOpen = true;

                // Close data - do not add children as attributes added for first point only
                loopNodes = false;

            }
        }
    } else {
        logger.warn("Name or class is empty:");
    }
    try {
        if (loopNodes) {
            for (INexusTree branch : tree) {
                writeHere(file, branch, makeData, attrBelowThisOnly, links);
            }
        }
    } finally {
        if (dataOpen) {
            file.closedata();
        }
        if (!name.isEmpty() && !nxClass.isEmpty() && !nxClassIsSDS) {
            file.closegroup();
        }
    }
}

From source file:processing.app.debug.Compiler.java

private void copyAdditionalFilesToBuildFolderSavingOriginalFolderStructure(SketchData sketch, String buildPath)
        throws RunnerException {
    Path sketchPath = Paths.get(sketch.getFolder().getAbsolutePath());
    Stream<Path> otherFilesStream;
    try {
        otherFilesStream = Files.find(sketchPath, ADDITIONAL_FILES_COPY_MAX_DEPTH,
                (path, attribs) -> !attribs.isDirectory() && isPathInASubfolder(sketchPath, path)
                        && FileUtils.hasExtension(path.toFile(), SketchData.OTHER_ALLOWED_EXTENSIONS));
    } catch (IOException e) {
        throw new RunnerException(e);
    }
    otherFilesStream
            .map((path) -> new Pair<>(path, Paths.get(buildPath, sketchPath.relativize(path).toString())))
            .forEach((pair) -> {
                try {
                    Files.createDirectories(pair.value.getParent());
                    Files.copy(pair.key, pair.value, StandardCopyOption.REPLACE_EXISTING);
                } catch (IOException e) {
                    e.printStackTrace();
                    throw new RuntimeException(I18n.format(_("Problem moving {0} to the build folder"),
                            sketchPath.relativize(pair.key).toString()));
                }
            });
}

From source file:processing.app.debug.OldCompiler.java

private void copyAdditionalFilesToBuildFolderSavingOriginalFolderStructure(SketchData sketch, String buildPath)
        throws RunnerException {
    Path sketchPath = Paths.get(sketch.getFolder().getAbsolutePath());
    Stream<Path> otherFilesStream;
    try {
        otherFilesStream = Files.find(sketchPath, ADDITIONAL_FILES_COPY_MAX_DEPTH,
                (path, attribs) -> !attribs.isDirectory() && isPathInASubfolder(sketchPath, path)
                        && FileUtils.hasExtension(path.toFile(), SketchData.OTHER_ALLOWED_EXTENSIONS));
    } catch (IOException e) {
        throw new RunnerException(e);
    }
    otherFilesStream
            .map((path) -> new Pair<>(path, Paths.get(buildPath, sketchPath.relativize(path).toString())))
            .forEach((pair) -> {
                try {
                    Files.createDirectories(pair.value.getParent());
                    Files.copy(pair.key, pair.value, StandardCopyOption.REPLACE_EXISTING);
                } catch (IOException e) {
                    e.printStackTrace();
                    throw new RuntimeException(I18n.format(tr("Problem moving {0} to the build folder"),
                            sketchPath.relativize(pair.key).toString()));
                }
            });
}

From source file:io.fabric8.docker.client.impl.BuildImage.java

@Override
public OutputHandle fromFolder(String path) {
    try {
        final Path root = Paths.get(path);
        final Path dockerIgnore = root.resolve(DOCKER_IGNORE);
        final List<String> ignorePatterns = new ArrayList<>();
        if (dockerIgnore.toFile().exists()) {
            for (String p : Files.readAllLines(dockerIgnore, UTF_8)) {
                ignorePatterns.add(path.endsWith(File.separator) ? path + p : path + File.separator + p);
            }
        }

        final DockerIgnorePathMatcher dockerIgnorePathMatcher = new DockerIgnorePathMatcher(ignorePatterns);

        File tempFile = Files.createTempFile(Paths.get(DEFAULT_TEMP_DIR), DOCKER_PREFIX, BZIP2_SUFFIX).toFile();

        try (FileOutputStream fout = new FileOutputStream(tempFile);
                BufferedOutputStream bout = new BufferedOutputStream(fout);
                BZip2CompressorOutputStream bzout = new BZip2CompressorOutputStream(bout);
                final TarArchiveOutputStream tout = new TarArchiveOutputStream(bzout)) {
            Files.walkFileTree(root, new SimpleFileVisitor<Path>() {

                @Override
                public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)
                        throws IOException {
                    if (dockerIgnorePathMatcher.matches(dir)) {
                        return FileVisitResult.SKIP_SUBTREE;
                    }
                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                    if (dockerIgnorePathMatcher.matches(file)) {
                        return FileVisitResult.SKIP_SUBTREE;
                    }

                    final Path relativePath = root.relativize(file);
                    final TarArchiveEntry entry = new TarArchiveEntry(file.toFile());
                    entry.setName(relativePath.toString());
                    entry.setMode(TarArchiveEntry.DEFAULT_FILE_MODE);
                    entry.setSize(attrs.size());
                    tout.putArchiveEntry(entry);
                    Files.copy(file, tout);
                    tout.closeArchiveEntry();
                    return FileVisitResult.CONTINUE;
                }
            });
            fout.flush();
        }
        return fromTar(tempFile.getAbsolutePath());

    } catch (IOException e) {
        throw DockerClientException.launderThrowable(e);
    }
}

From source file:com.netflix.nicobar.core.module.ScriptModuleLoader.java

/**
 * Add or update the existing {@link ScriptModule}s with the given script archives.
 * This method will convert the archives to modules and then compile + link them in to the
 * dependency graph. It will then recursively re-link any modules depending on the new modules.
 * If this loader already contains an old version of the module, it will be unloaded on
 * successful compile of the new module.
 *
 * @param candidateArchives archives to load or update
 */
public synchronized void updateScriptArchives(Set<? extends ScriptArchive> candidateArchives) {
    Objects.requireNonNull(candidateArchives);
    long updateNumber = System.currentTimeMillis();

    // map script module id to archive to be compiled
    Map<ModuleId, ScriptArchive> archivesToCompile = new HashMap<ModuleId, ScriptArchive>(
            candidateArchives.size() * 2);

    // create an updated mapping of the scriptModuleId to latest revisionId including the yet-to-be-compiled archives
    Map<ModuleId, ModuleIdentifier> oldRevisionIdMap = jbossModuleLoader.getLatestRevisionIds();
    Map<ModuleId, ModuleIdentifier> updatedRevisionIdMap = new HashMap<ModuleId, ModuleIdentifier>(
            (oldRevisionIdMap.size() + candidateArchives.size()) * 2);
    updatedRevisionIdMap.putAll(oldRevisionIdMap);

    // Map of the scriptModuleId to it's updated set of dependencies
    Map<ModuleId, Set<ModuleId>> archiveDependencies = new HashMap<ModuleId, Set<ModuleId>>();
    for (ScriptArchive scriptArchive : candidateArchives) {
        ModuleId scriptModuleId = scriptArchive.getModuleSpec().getModuleId();

        // filter out archives that have a newer module already loaded
        long createTime = scriptArchive.getCreateTime();
        ScriptModule scriptModule = loadedScriptModules.get(scriptModuleId);
        long latestCreateTime = scriptModule != null ? scriptModule.getCreateTime() : 0;
        if (createTime < latestCreateTime) {
            notifyArchiveRejected(scriptArchive, ArchiveRejectedReason.HIGHER_REVISION_AVAILABLE, null);
            continue;
        }

        // create the new revisionIds that should be used for the linkages when the new modules
        // are defined.
        ModuleIdentifier newRevisionId = JBossModuleUtils.createRevisionId(scriptModuleId, updateNumber);
        updatedRevisionIdMap.put(scriptModuleId, newRevisionId);

        archivesToCompile.put(scriptModuleId, scriptArchive);

        // create a dependency map of the incoming archives so that we can later build a candidate graph
        archiveDependencies.put(scriptModuleId, scriptArchive.getModuleSpec().getModuleDependencies());
    }

    // create a dependency graph with the candidates swapped in in order to figure out the
    // order in which the candidates should be loaded
    DirectedGraph<ModuleId, DefaultEdge> candidateGraph = jbossModuleLoader.getModuleNameGraph();
    GraphUtils.swapVertices(candidateGraph, archiveDependencies);

    // iterate over the graph in reverse dependency order
    Set<ModuleId> leaves = GraphUtils.getLeafVertices(candidateGraph);
    while (!leaves.isEmpty()) {
        for (ModuleId scriptModuleId : leaves) {
            ScriptArchive scriptArchive = archivesToCompile.get(scriptModuleId);
            if (scriptArchive == null) {
                continue;
            }
            ModuleSpec moduleSpec;
            ModuleIdentifier candidateRevisionId = updatedRevisionIdMap.get(scriptModuleId);
            Path modulePath = createModulePath(candidateRevisionId);
            final Path moduleCompilationRoot = compilationRootDir.resolve(modulePath);
            FileUtils.deleteQuietly(moduleCompilationRoot.toFile());
            try {
                Files.createDirectories(moduleCompilationRoot);
            } catch (IOException ioe) {
                notifyArchiveRejected(scriptArchive, ArchiveRejectedReason.ARCHIVE_IO_EXCEPTION, ioe);
            }

            try {
                moduleSpec = createModuleSpec(scriptArchive, candidateRevisionId, updatedRevisionIdMap,
                        moduleCompilationRoot);
            } catch (ModuleLoadException e) {
                logger.error("Exception loading archive " + scriptArchive.getModuleSpec().getModuleId(), e);
                notifyArchiveRejected(scriptArchive, ArchiveRejectedReason.ARCHIVE_IO_EXCEPTION, e);
                continue;
            }

            // load and compile the module
            jbossModuleLoader.addModuleSpec(moduleSpec);
            Module jbossModule = null;
            try {
                jbossModule = jbossModuleLoader.loadModule(candidateRevisionId);
                compileModule(jbossModule, moduleCompilationRoot);

                // Now refresh the resource loaders for this module, and load the set of
                // compiled classes and populate into the module's local class cache.
                jbossModuleLoader.rescanModule(jbossModule);

                final Set<String> classesToLoad = new LinkedHashSet<String>();
                Files.walkFileTree(moduleCompilationRoot, new SimpleFileVisitor<Path>() {
                    public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                        String relativePath = moduleCompilationRoot.relativize(file).toString();
                        if (relativePath.endsWith(".class")) {
                            String className = relativePath.replaceAll("\\.class$", "").replace("\\", ".")
                                    .replace("/", ".");
                            classesToLoad.add(className);
                        }
                        return FileVisitResult.CONTINUE;
                    };
                });
                for (String loadClass : classesToLoad) {
                    Class<?> loadedClass = jbossModule.getClassLoader().loadClassLocal(loadClass, true);
                    if (loadedClass == null)
                        throw new ScriptCompilationException("Unable to load compiled class: " + loadClass);
                }
            } catch (Exception e) {
                // rollback
                logger.error("Exception loading module " + candidateRevisionId, e);
                if (candidateArchives.contains(scriptArchive)) {
                    // this spec came from a candidate archive. Send reject notification
                    notifyArchiveRejected(scriptArchive, ArchiveRejectedReason.COMPILE_FAILURE, e);
                }
                if (jbossModule != null) {
                    jbossModuleLoader.unloadModule(jbossModule);
                }
                continue;
            }

            // commit the change by removing the old module
            ModuleIdentifier oldRevisionId = oldRevisionIdMap.get(scriptModuleId);
            if (oldRevisionId != null) {
                jbossModuleLoader.unloadModule(oldRevisionId);
            }

            JBossScriptModule scriptModule = new JBossScriptModule(scriptModuleId, jbossModule, scriptArchive);
            ScriptModule oldModule = loadedScriptModules.put(scriptModuleId, scriptModule);
            notifyModuleUpdate(scriptModule, oldModule);

            // find dependents and add them to the to be compiled set
            Set<ModuleId> dependents = GraphUtils.getIncomingVertices(candidateGraph, scriptModuleId);
            for (ModuleId dependentScriptModuleId : dependents) {
                if (!archivesToCompile.containsKey(dependentScriptModuleId)) {
                    ScriptModule dependentScriptModule = loadedScriptModules.get(dependentScriptModuleId);
                    if (dependentScriptModule != null) {
                        archivesToCompile.put(dependentScriptModuleId,
                                dependentScriptModule.getSourceArchive());
                        ModuleIdentifier dependentRevisionId = JBossModuleUtils
                                .createRevisionId(dependentScriptModuleId, updateNumber);
                        updatedRevisionIdMap.put(dependentScriptModuleId, dependentRevisionId);
                    }
                }
            }
        }

        GraphUtils.removeVertices(candidateGraph, leaves);
        leaves = GraphUtils.getLeafVertices(candidateGraph);
    }
}

From source file:org.apache.nifi.controller.repository.FileSystemRepository.java

private void removeIncompleteContent(final String containerName, final Path containerPath,
        final Path fileToRemove) {
    if (Files.isDirectory(fileToRemove)) {
        final Path lastPathName = fileToRemove.subpath(1, fileToRemove.getNameCount());
        final String fileName = lastPathName.toFile().getName();
        if (fileName.equals(ARCHIVE_DIR_NAME)) {
            return;
        }

        final File[] children = fileToRemove.toFile().listFiles();
        if (children != null) {
            for (final File child : children) {
                removeIncompleteContent(containerName, containerPath, child.toPath());
            }
        }

        return;
    }

    final Path relativePath = containerPath.relativize(fileToRemove);
    final Path sectionPath = relativePath.subpath(0, 1);
    if (relativePath.getNameCount() < 2) {
        return;
    }

    final Path idPath = relativePath.subpath(1, relativePath.getNameCount());
    final String id = idPath.toFile().getName();
    final String sectionName = sectionPath.toFile().getName();

    final ResourceClaim resourceClaim = resourceClaimManager.newResourceClaim(containerName, sectionName, id,
            false, false);
    if (resourceClaimManager.getClaimantCount(resourceClaim) == 0) {
        removeIncompleteContent(fileToRemove);
    }
}

From source file:org.apache.openaz.xacml.admin.components.PolicyWorkspace.java

@Override
public InputStream getStream() {
    //
    // Grab our working repository
    //
    final Path repoPath = ((XacmlAdminUI) getUI()).getUserGitPath();
    Path workspacePath = ((XacmlAdminUI) getUI()).getUserWorkspace();
    final Path tarFile = Paths.get(workspacePath.toString(), "Repository.tgz");

    try (OutputStream os = Files.newOutputStream(tarFile)) {
        try (GzipCompressorOutputStream gzOut = new GzipCompressorOutputStream(os)) {
            try (TarArchiveOutputStream tarOut = new TarArchiveOutputStream(gzOut)) {

                tarOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);

                Files.walkFileTree(repoPath, new SimpleFileVisitor<Path>() {

                    @Override
                    public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)
                            throws IOException {
                        if (dir.getFileName().toString().startsWith(".git")) {
                            return FileVisitResult.SKIP_SUBTREE;
                        }
                        Path relative = repoPath.relativize(dir);
                        if (relative.toString().isEmpty()) {
                            return super.preVisitDirectory(dir, attrs);
                        }
                        TarArchiveEntry entry = new TarArchiveEntry(relative.toFile());
                        tarOut.putArchiveEntry(entry);
                        tarOut.closeArchiveEntry();
                        return super.preVisitDirectory(dir, attrs);
                    }

                    @Override
                    public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                        if (file.getFileName().toString().endsWith(".xml") == false) {
                            return super.visitFile(file, attrs);
                        }
                        Path relative = repoPath.relativize(file);
                        TarArchiveEntry entry = new TarArchiveEntry(relative.toFile());
                        entry.setSize(Files.size(file));
                        tarOut.putArchiveEntry(entry);
                        try {
                            IOUtils.copy(Files.newInputStream(file), tarOut);
                        } catch (IOException e) {
                            logger.error(e);
                        }
                        tarOut.closeArchiveEntry();
                        return super.visitFile(file, attrs);
                    }

                });
                tarOut.finish();
            }
        }
    } catch (IOException e) {
        logger.error(e);
    }
    try {
        return Files.newInputStream(tarFile);
    } catch (IOException e) {
        logger.error(e);
    }
    return null;
}

From source file:gov.vha.isaac.rf2.filter.RF2Filter.java

@Override
public void execute() throws MojoExecutionException {
    if (!inputDirectory.exists() || !inputDirectory.isDirectory()) {
        throw new MojoExecutionException("Path doesn't exist or isn't a folder: " + inputDirectory);
    }

    if (module == null) {
        throw new MojoExecutionException("You must provide a module or namespace for filtering");
    }

    moduleStrings_.add(module + "");

    outputDirectory.mkdirs();
    File temp = new File(outputDirectory, inputDirectory.getName());
    temp.mkdirs();

    Path source = inputDirectory.toPath();
    Path target = temp.toPath();
    try {
        getLog().info("Reading from " + inputDirectory.getAbsolutePath());
        getLog().info("Writing to " + outputDirectory.getCanonicalPath());

        summary_.append("This content was filtered by an RF2 filter tool.  The parameters were module: "
                + module + " software version: " + converterVersion);
        summary_.append("\r\n\r\n");

        getLog().info("Checking for nested child modules");

        //look in sct2_Relationship_ files, find anything where the 6th column (destinationId) is the 
        //starting module ID concept - and add that sourceId (5th column) to our list of modules to extract
        Files.walkFileTree(source, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                if (file.toFile().getName().startsWith("sct2_Relationship_")) {
                    //don't look for quotes, the data is bad, and has floating instances of '"' all by itself
                    CSVReader csvReader = new CSVReader(
                            new InputStreamReader(new FileInputStream(file.toFile())), '\t',
                            CSVParser.NULL_CHARACTER);
                    String[] line = csvReader.readNext();
                    if (!line[4].equals("sourceId") || !line[5].equals("destinationId")) {
                        csvReader.close();
                        throw new IOException("Unexpected error looking for nested modules");
                    }
                    line = csvReader.readNext();
                    while (line != null) {
                        if (line[5].equals(moduleStrings_.get(0))) {
                            moduleStrings_.add(line[4]);
                        }
                        line = csvReader.readNext();
                    }
                    csvReader.close();
                }
                return FileVisitResult.CONTINUE;
            }
        });

        log("Full module list (including detected nested modules: "
                + Arrays.toString(moduleStrings_.toArray(new String[moduleStrings_.size()])));

        Files.walkFileTree(source, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
                Path targetdir = target.resolve(source.relativize(dir));
                try {
                    //this just creates the sub-directory in the target
                    Files.copy(dir, targetdir);
                } catch (FileAlreadyExistsException e) {
                    if (!Files.isDirectory(targetdir))
                        throw e;
                }
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                handleFile(file, target.resolve(source.relativize(file)));
                return FileVisitResult.CONTINUE;
            }
        });

        Files.write(new File(temp, "FilterInfo.txt").toPath(), summary_.toString().getBytes(),
                StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
    } catch (IOException e) {
        throw new MojoExecutionException("Failure", e);
    }

    getLog().info("Filter Complete");

}

From source file:org.tinymediamanager.core.movie.tasks.MovieUpdateDatasourceTask2.java

private void parseMovieDirectory(Path movieDir, Path dataSource) {
    List<Path> movieDirList = listFilesAndDirs(movieDir);
    ArrayList<Path> files = new ArrayList<>();
    ArrayList<Path> dirs = new ArrayList<>(); // FIXME: what for....?
    HashSet<String> normalizedVideoFiles = new HashSet<>(); // just for
                                                            // identifying MMD

    boolean isDiscFolder = false;
    boolean isMultiMovieDir = false;
    boolean videoFileFound = false;
    Path movieRoot = movieDir; // root set to current dir - might be adjusted by
                               // disc folders

    for (Path path : movieDirList) {
        if (Utils.isRegularFile(path)) {
            files.add(path.toAbsolutePath());

            // do not construct a fully MF yet
            // just minimal to get the type out of filename
            MediaFile mf = new MediaFile();
            mf.setPath(path.getParent().toString());
            mf.setFilename(path.getFileName().toString());
            mf.setType(mf.parseType());

            // System.out.println("************ " + mf);
            if (mf.getType() == MediaFileType.VIDEO) {
                videoFileFound = true;
                if (mf.isDiscFile()) {
                    isDiscFolder = true;
                    break; // step out - this is all we need to know
                } else {
                    // detect unique basename, without stacking etc
                    String[] ty = ParserUtils.detectCleanMovienameAndYear(
                            FilenameUtils.getBaseName(Utils.cleanStackingMarkers(mf.getFilename())));
                    normalizedVideoFiles.add(ty[0] + ty[1]);
                }
            }
        } else if (Files.isDirectory(path)) {
            dirs.add(path.toAbsolutePath());
        }
    }

    if (!videoFileFound) {
        // hmm... we never found a video file (but maybe others, trailers) so NO
        // need to parse THIS folder
        return;
    }

    if (isDiscFolder) {
        // if inside own DiscFolder, walk backwards till movieRoot folder
        Path relative = dataSource.relativize(movieDir);
        while (relative.toString().toUpperCase(Locale.ROOT).contains("VIDEO_TS")
                || relative.toString().toUpperCase(Locale.ROOT).contains("BDMV")) {
            movieDir = movieDir.getParent();
            relative = dataSource.relativize(movieDir);
        }
        movieRoot = movieDir;
    } else {
        // no VIDEO files in this dir - skip this folder
        if (normalizedVideoFiles.size() == 0) {
            return;
        }
        // more than one (unstacked) movie file in directory (or DS root) -> must
        // parsed as multiMovieDir
        if (normalizedVideoFiles.size() > 1 || movieDir.equals(dataSource)) {
            isMultiMovieDir = true;
        }
    }

    if (cancel) {
        return;
    }
    // ok, we're ready to parse :)
    if (isMultiMovieDir) {
        createMultiMovieFromDir(dataSource, movieRoot, files);
    } else {
        createSingleMovieFromDir(dataSource, movieRoot, isDiscFolder);
    }

}