Example usage for java.nio.file FileVisitOption FOLLOW_LINKS

Introduction

This page collects example usages of java.nio.file FileVisitOption.FOLLOW_LINKS, drawn from open source projects.

Prototype

FileVisitOption FOLLOW_LINKS

Document

Follow symbolic links.
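
Before the project examples below, here is a minimal self-contained sketch of how FOLLOW_LINKS is typically passed to Files.walkFileTree (as a Set) and to Files.walk (as a vararg). The class name and the starting directory are illustrative placeholders, not taken from any of the projects below.

import java.io.IOException;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.EnumSet;
import java.util.stream.Stream;

public class FollowLinksSketch {
    public static void main(String[] args) throws IOException {
        Path start = Paths.get("/tmp/data"); //hypothetical starting directory

        //walkFileTree: with FOLLOW_LINKS the walk descends into directories
        //reached through symbolic links; cycles are reported as FileSystemLoopException.
        EnumSet<FileVisitOption> opts = EnumSet.of(FileVisitOption.FOLLOW_LINKS);
        Files.walkFileTree(start, opts, Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
                System.out.println(file);
                return FileVisitResult.CONTINUE;
            }
        });

        //Files.walk: the same option passed as a vararg; close the returned stream.
        try (Stream<Path> paths = Files.walk(start, FileVisitOption.FOLLOW_LINKS)) {
            paths.filter(Files::isRegularFile).forEach(System.out::println);
        }
    }
}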

Usage

From source file:gov.noaa.pfel.coastwatch.util.FileVisitorDNLS.java

/**
 * This is a convenience method for using this class. 
 * <p>This works with Amazon AWS S3 bucket URLs. Internal /'s in the keys will be
 * treated as folder separators. If there aren't any /'s, all the keys will 
 * be in the root directory.
 *
 * @param tDir The starting directory, with \\ or /, with or without trailing slash.  
 *    The resulting directoryPA will contain dirs with matching slashes and trailing slash.
 * @param tFileNameRegex a regex that file names must match to be included in the results.
 * @param tRecursive if true, subdirectories are searched recursively.
 * @param tPathRegex a regex to constrain which subdirs to include.
 *   This is ignored if recursive is false.
 *   null or "" is treated as .* (i.e., match everything).
 * @param tDirectoriesToo if true, each directory name will get its own row 
 *   in the results.
 * @return a table with DIRECTORY, NAME, LASTMODIFIED, and SIZE columns.
 *    LASTMODIFIED and SIZE are LongArrays; for directories, when the values
 *    are otherwise unknown, the value will be Long.MAX_VALUE.
 *    If directoriesToo=true, the original dir won't be included and any 
 *    directory's file NAME will be "".
 * @throws IOException if trouble
 */
public static Table oneStep(String tDir, String tFileNameRegex, boolean tRecursive, String tPathRegex,
        boolean tDirectoriesToo) throws IOException {
    long time = System.currentTimeMillis();

    //is tDir an http URL?
    if (tDir.matches(FileVisitorDNLS.HTTP_REGEX)) {

        //Is it an S3 bucket with "files"?
        //If testing a "dir", url should have a trailing slash.
        Matcher matcher = AWS_S3_PATTERN.matcher(File2.addSlash(tDir)); //force trailing slash
        if (matcher.matches()) {
            //http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
            //If files have file-system-like names, e.g., 
            //  http://bucketname.s3.amazonaws.com/dir1/dir2/fileName.ext)
            //  http://nasanex.s3.amazonaws.com/NEX-DCP30/BCSD/rcp26/mon/atmos/tasmin/r1i1p1/v1.0/CONUS/tasmin_amon_BCSD_rcp26_r1i1p1_CONUS_NorESM1-M_209601-209912.nc
            //  you still can't request just dir2 info because they aren't directories.
            //  They are just object keys with internal slashes. 
            //So specify prefix in request.
            Table table = makeEmptyTable();
            StringArray directoryPA = (StringArray) table.getColumn(DIRECTORY);
            StringArray namePA = (StringArray) table.getColumn(NAME);
            LongArray lastModifiedPA = (LongArray) table.getColumn(LASTMODIFIED);
            LongArray sizePA = (LongArray) table.getColumn(SIZE);

            String bucketName = matcher.group(1);
            String prefix = matcher.group(2);
            String baseURL = tDir.substring(0, matcher.start(2));
            AmazonS3 s3client = new AmazonS3Client(new ProfileCredentialsProvider());
            try {
                if (verbose)
                    String2.log("FileVisitorDNLS.oneStep getting info from AWS S3 at" + "\nURL=" + tDir);
                //"\nbucket=" + bucketName + " prefix=" + prefix);

                //I wanted to generate lastMod for dir based on lastMod of files
                //but it would be inconsistent for different requests (recursive, fileNameRegex).
                //so just a set of dir names.
                HashSet<String> dirHashSet = new HashSet();
                ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(bucketName)
                        .withPrefix(prefix);
                ObjectListing objectListing;
                do {
                    objectListing = s3client.listObjects(listObjectsRequest);
                    for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
                        String keyFullName = objectSummary.getKey();
                        String keyDir = File2.getDirectory(baseURL + keyFullName);
                        String keyName = File2.getNameAndExtension(keyFullName);
                        if (debugMode)
                            String2.log(
                                    "keyFullName=" + keyFullName + "\nkeyDir=" + keyDir + "\n  tDir=" + tDir);
                        if (keyDir.startsWith(tDir) && //it should
                                (tRecursive || keyDir.length() == tDir.length())) {

                            //store this dir
                            if (tDirectoriesToo) {
                                //S3 only returns object keys. I must infer/collect directories.
                                //Store this dir and parents back to tDir.
                                String choppedKeyDir = keyDir;
                                while (choppedKeyDir.length() >= tDir.length()) {
                                    if (!dirHashSet.add(choppedKeyDir))
                                        break; //hash set already had this, so will already have parents

                                    //chop off last subdirectory
                                    choppedKeyDir = File2.getDirectory(
                                            choppedKeyDir.substring(0, choppedKeyDir.length() - 1)); //remove trailing /
                                }
                            }

                            //store this file's information
                            //Sometimes directories appear as files named "" with size=0.
                            //I don't store those as files.
                            if (debugMode)
                                String2.log("keyName=" + keyFullName + "\n tFileNameRegex=" + tFileNameRegex
                                        + " matches=" + keyName.matches(tFileNameRegex));
                            if (keyName.length() > 0 && keyName.matches(tFileNameRegex)) {
                                directoryPA.add(keyDir);
                                namePA.add(keyName);
                                lastModifiedPA.add(objectSummary.getLastModified().getTime()); //epoch millis
                                sizePA.add(objectSummary.getSize()); //long
                            }
                        }
                    }
                    listObjectsRequest.setMarker(objectListing.getNextMarker());
                } while (objectListing.isTruncated());

                //add directories to the table
                if (tDirectoriesToo) {
                    Iterator<String> it = dirHashSet.iterator();
                    while (it.hasNext()) {
                        directoryPA.add(it.next());
                        namePA.add("");
                        lastModifiedPA.add(Long.MAX_VALUE);
                        sizePA.add(Long.MAX_VALUE);
                    }
                }

                table.leftToRightSortIgnoreCase(2);
                return table;

            } catch (AmazonServiceException ase) {
                throw new IOException("AmazonServiceException: " + ase.getErrorType() + " ERROR, HTTP Code="
                        + ase.getStatusCode() + ": " + ase.getMessage(), ase);
            } catch (AmazonClientException ace) {
                throw new IOException(ace.getMessage(), ace);
            }
        }

        //HYRAX before THREDDS
        //http://dods.jpl.nasa.gov/opendap/ocean_wind/ccmp/L3.5a/data/flk/1988/
        matcher = HYRAX_PATTERN.matcher(tDir);
        if (matcher.matches()) {
            try {
                if (verbose)
                    String2.log("FileVisitorDNLS.oneStep getting info from Hyrax at" + "\nURL=" + tDir);
                Table table = makeEmptyTable();
                StringArray directoryPA = (StringArray) table.getColumn(DIRECTORY);
                StringArray namePA = (StringArray) table.getColumn(NAME);
                LongArray lastModifiedPA = (LongArray) table.getColumn(LASTMODIFIED);
                LongArray sizePA = (LongArray) table.getColumn(SIZE);

                DoubleArray lastModDA = new DoubleArray();
                addToHyraxUrlList(tDir, tFileNameRegex, tRecursive, tPathRegex, tDirectoriesToo, namePA,
                        lastModDA, sizePA);
                lastModifiedPA.append(lastModDA);
                int n = namePA.size();
                for (int i = 0; i < n; i++) {
                    String fn = namePA.get(i);
                    directoryPA.add(File2.getDirectory(fn));
                    namePA.set(i, File2.getNameAndExtension(fn));
                }

                table.leftToRightSortIgnoreCase(2);
                return table;
            } catch (Throwable t) {
                throw new IOException(t.getMessage(), t);
            }
        }

        //THREDDS
        matcher = THREDDS_PATTERN.matcher(tDir);
        if (matcher.matches()) {
            try {
                if (verbose)
                    String2.log("FileVisitorDNLS.oneStep getting info from THREDDS at" + "\nURL=" + tDir);
                Table table = makeEmptyTable();
                StringArray directoryPA = (StringArray) table.getColumn(DIRECTORY);
                StringArray namePA = (StringArray) table.getColumn(NAME);
                LongArray lastModifiedPA = (LongArray) table.getColumn(LASTMODIFIED);
                LongArray sizePA = (LongArray) table.getColumn(SIZE);

                DoubleArray lastModDA = new DoubleArray();
                addToThreddsUrlList(tDir, tFileNameRegex, tRecursive, tPathRegex, tDirectoriesToo, namePA,
                        lastModDA, sizePA);
                lastModifiedPA.append(lastModDA);
                int n = namePA.size();
                for (int i = 0; i < n; i++) {
                    String fn = namePA.get(i);
                    directoryPA.add(File2.getDirectory(fn));
                    namePA.set(i, File2.getNameAndExtension(fn));
                }

                table.leftToRightSortIgnoreCase(2);
                return table;
            } catch (Throwable t) {
                throw new IOException(t.getMessage(), t);
            }
        }

        //default: Apache-style WAF
        try {
            if (verbose)
                String2.log("FileVisitorDNLS.oneStep getting info from Apache-style WAF at" + "\nURL=" + tDir);
            Table table = makeEmptyTable();
            StringArray directorySA = (StringArray) table.getColumn(DIRECTORY);
            StringArray nameSA = (StringArray) table.getColumn(NAME);
            LongArray lastModLA = (LongArray) table.getColumn(LASTMODIFIED);
            LongArray sizeLA = (LongArray) table.getColumn(SIZE);

            addToWAFUrlList(tDir, tFileNameRegex, tRecursive, tPathRegex, tDirectoriesToo, directorySA, nameSA,
                    lastModLA, sizeLA);
            table.leftToRightSortIgnoreCase(2);
            return table;
        } catch (Throwable t) {
            throw new IOException(t.getMessage(), t);
        }
    }

    //local files
    //follow symbolic links: https://docs.oracle.com/javase/7/docs/api/java/nio/file/FileVisitor.html
    //But this doesn't follow Windows symbolic link .lnk's:
    //  http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4237760
    FileVisitorDNLS fv = new FileVisitorDNLS(tDir, tFileNameRegex, tRecursive, tPathRegex, tDirectoriesToo);
    EnumSet<FileVisitOption> opts = EnumSet.of(FileVisitOption.FOLLOW_LINKS);
    Files.walkFileTree(FileSystems.getDefault().getPath(tDir), opts, //follow symbolic links
            Integer.MAX_VALUE, //maxDepth
            fv);
    fv.table.leftToRightSortIgnoreCase(2);
    if (verbose)
        String2.log("FileVisitorDNLS.oneStep(local) finished successfully. n=" + fv.directoryPA.size()
                + " time=" + (System.currentTimeMillis() - time) + "ms");
    return fv.table;
}
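
For orientation, a hypothetical call to the oneStep convenience method shown above might look like the sketch below; the directory and regex values are made-up placeholders, not taken from the project.

Table table = FileVisitorDNLS.oneStep(
        "/data/ncFiles/", //tDir (made-up local directory)
        ".*\\.nc",        //tFileNameRegex
        true,             //tRecursive
        ".*",             //tPathRegex
        false);           //tDirectoriesToo
//The returned table has DIRECTORY, NAME, LASTMODIFIED, and SIZE columns.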

From source file:com.bytelightning.opensource.pokerface.PokerFace.java

/**
 * If requested by the user, this method walks the script directory, discovering, loading, compiling, and initializing any .js JavaScript files it finds in the specified directory or its children.
 * @param baseScriptDirectory   The contents of this directory should be structured in the same layout as the URLs we wish to interfere with.
 * @param watchScriptDirectory   If true, a watch will be placed on <code>baseScriptDirectory</code> and any JavaScript file modifications (create/update/delete) will be dynamically rebuilt and reflected in the running server. 
 * @return   True if all scripts were successfully loaded.
 */
protected boolean configureScripts(final List<Path> jsLibs, final HierarchicalConfiguration scriptConfig,
        final Path baseScriptDirectory, boolean watchScriptDirectory) {
    // Our unit test has verified that CompiledScripts can produce objects (endpoints) that can be executed from ANY thread (and even concurrently execute immutable methods).
    // However we have not validated that Nashorn can compile *and* recompile scripts from multiple threads.
    //TODO: Write unit test to see if we can use all available processors to compile discovered javascript files.
    ScriptCompilationExecutor = Executors.newSingleThreadScheduledExecutor();
    // This is done to make sure the engine is allocated in the same thread that will be doing the compiling.
    Callable<Boolean> compileScriptsTask = new Callable<Boolean>() {
        @Override
        public Boolean call() {
            Nashorn = new ScriptEngineManager().getEngineByName("nashorn");

            if (jsLibs != null)
                for (Path lib : jsLibs)
                    if (!loadScriptLibrary(lib))
                        return false;

            // Recursively discover javascript files, compile, load, and setup any that are found.
            EnumSet<FileVisitOption> opts = EnumSet.of(FileVisitOption.FOLLOW_LINKS);
            try {
                Files.walkFileTree(baseScriptDirectory, opts, Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
                    @Override
                    public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)
                            throws IOException {
                        if (Files.isDirectory(dir) && dir.getFileName().toString().startsWith("#"))
                            return FileVisitResult.SKIP_SUBTREE;
                        return super.preVisitDirectory(dir, attrs);
                    }

                    @Override
                    public FileVisitResult visitFile(Path path, BasicFileAttributes attrs) throws IOException {
                        if (Files.isRegularFile(path)) {
                            if (path.toString().toLowerCase().endsWith(".js")) {
                                MakeJavaScriptEndPointDescriptor(baseScriptDirectory, path, scriptConfig,
                                        new NewEndpointSetupCallback());
                            }
                        }
                        return FileVisitResult.CONTINUE;
                    }
                });
            } catch (IOException e) {
                Logger.error("Unable recursively load scripts", e);
                return false;
            }
            return true;
        }
    };
    // Walk the root directory recursively compiling all discovered javascript files (does not return until all endpoint files have been setup).
    try {
        if (!ScriptCompilationExecutor.submit(compileScriptsTask).get())
            return false;
    } catch (Throwable e) {
        Logger.error("Unable to compile scripts", e);
        return false;
    }
    if (watchScriptDirectory) {
        try {
            // Establish a watch on the root
            ScriptDirectoryWatcher.establishWatch(baseScriptDirectory, new DirectoryWatchEventListener() {
                // Internal Callable task to load, compile, and initialize a javascript file endpoint.
                final class CreateEndpointTask implements Callable<Void> {
                    public CreateEndpointTask(Path file, EndpointSetupCompleteCallback callback) {
                        this.file = file;
                        this.callback = callback;
                    }

                    private final Path file;
                    private final EndpointSetupCompleteCallback callback;

                    @Override
                    public Void call() {
                        MakeJavaScriptEndPointDescriptor(baseScriptDirectory, file, scriptConfig, callback);
                        return null;
                    }
                }

                // Internal Callable task that gives us the ability to schedule a delayed unload of a deleted or obsoleted endpoint.
                // By delaying for a period of time longer than twice the socket timeout, we can safely call the endpoint's teardown method.
                final class DecommisionEndpointTask implements Callable<Void> {
                    private DecommisionEndpointTask(ScriptObjectMirror endpoint) {
                        this.endpoint = endpoint;
                    }

                    private final ScriptObjectMirror endpoint;

                    @Override
                    public Void call() {
                        if (endpoint.hasMember("teardown"))
                            endpoint.callMember("teardown");
                        return null;
                    }
                }

                /**
                 * Called by the WatchService when the contents of the script directory have changed.
                 */
                @Override
                public void onWatchEvent(Path watchDir, final Path oldFile, final Path newFile,
                        FileChangeType change) {
                    if (change == FileChangeType.eRenamed) {
                        // If it was changed to something that does *not* end .js then it should no longer be considered an endpoint.
                        if (oldFile.toString().toLowerCase().endsWith(".js"))
                            if (!newFile.toString().toLowerCase().endsWith(".js"))
                                change = FileChangeType.eDeleted;
                    }
                    if (change == FileChangeType.eModified || change == FileChangeType.eRenamed) {
                        // Decommission the obsolete and load the update.
                        try {
                            assert newFile.toString().toLowerCase().endsWith(".js"); // Will be true because of the 'rename' check at the top of this method.
                            ScriptCompilationExecutor
                                    .submit(new CreateEndpointTask(newFile, new NewEndpointSetupCallback() {
                                        @Override
                                        public ScriptObjectMirror setupComplete(JavaScriptEndPoint endpoint) {
                                            ScriptObjectMirror old = super.setupComplete(endpoint);
                                            assert old != null;
                                            // Yeah, it's hincky, but it won't be in use this long after we remove it from the Map.
                                            ScriptCompilationExecutor.schedule(new DecommisionEndpointTask(old),
                                                    6, TimeUnit.MINUTES);
                                            return null;
                                        }
                                    }));
                        } catch (Throwable e) {
                            Logger.error("Unable to compile modified script found at "
                                    + newFile.toAbsolutePath().toString(), e);
                        }
                    } else if (change == FileChangeType.eCreated) {
                        // This is the easy one.  If a javascript file was created, load it.
                        if (newFile.toString().toLowerCase().endsWith(".js")) {
                            try {
                                ScriptCompilationExecutor.submit(
                                        new CreateEndpointTask(newFile, new NewEndpointSetupCallback()));
                            } catch (Throwable e) {
                                Logger.error("Unable to compile new script found at "
                                        + newFile.toAbsolutePath().toString(), e);
                            }
                        }
                    } else if (change == FileChangeType.eDeleted) {
                        // Endpoint should be decommissioned.
                        if (oldFile.toString().toLowerCase().endsWith(".js")) {
                            String uriKey = FileToUriKey(baseScriptDirectory, oldFile);
                            ScriptObjectMirror desc = scripts.remove(uriKey);
                            if (desc != null) {
                                // Yeah, it's hincky, but it won't be in use this long after we remove it from the Map.
                                ScriptCompilationExecutor.schedule(new DecommisionEndpointTask(desc), 6,
                                        TimeUnit.MINUTES);
                            }
                        }
                    }
                }
            });
        } catch (IOException e) {
            Logger.error("Unable to establish a real time watch on the script directory.", e);
        }
    } else // Not watching for changes, so we are done with the Executor.
        ScriptCompilationExecutor.shutdown();
    return true;
}

From source file:net.sourceforge.pmd.cache.AbstractAnalysisCache.java

private URL[] getClassPathEntries() {
    final String classpath = System.getProperty("java.class.path");
    final String[] classpathEntries = classpath.split(File.pathSeparator);
    final List<URL> entries = new ArrayList<>();

    final SimpleFileVisitor<Path> fileVisitor = new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) throws IOException {
            if (!attrs.isSymbolicLink()) { // Broken link that can't be followed
                entries.add(file.toUri().toURL());
            }
            return FileVisitResult.CONTINUE;
        }
    };
    final SimpleFileVisitor<Path> jarFileVisitor = new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) throws IOException {
            String extension = FilenameUtils.getExtension(file.toString());
            if ("jar".equalsIgnoreCase(extension)) {
                fileVisitor.visitFile(file, attrs);
            }
            return FileVisitResult.CONTINUE;
        }
    };

    try {
        for (final String entry : classpathEntries) {
            final File f = new File(entry);
            if (isClassPathWildcard(entry)) {
                Files.walkFileTree(new File(entry.substring(0, entry.length() - 1)).toPath(),
                        EnumSet.of(FileVisitOption.FOLLOW_LINKS), 1, jarFileVisitor);
            } else if (f.isFile()) {
                entries.add(f.toURI().toURL());
            } else {
                Files.walkFileTree(f.toPath(), EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE,
                        fileVisitor);
            }
        }
    } catch (final IOException e) {
        LOG.log(Level.SEVERE, "Incremental analysis can't check execution classpath contents", e);
        throw new RuntimeException(e);
    }

    return entries.toArray(new URL[0]);
}

From source file:org.artifactory.support.core.bundle.AbstractSupportBundleService.java

/**
 * Lists previously created bundles
 *
 * @return archive/s
 */
@Override
public final List<String> list() {

    List<String> archives = Lists.newLinkedList();

    try {
        Files.walk(getOutputDirectory().toPath(), FileVisitOption.FOLLOW_LINKS).filter(Files::isRegularFile)
                .filter(f -> f.toFile().getName().startsWith(SUPPORT_BUNDLE_PREFIX)).filter(p -> FilenameUtils
                        .getExtension(p.toString()).equals(CompressionService.ARCHIVE_EXTENSION))
                .sorted(new Comparator<Path>() {
                    @Override
                    public int compare(Path o1, Path o2) {
                        return o2.toString().compareTo(o1.toString());
                    }
                }).forEach(p -> archives.add(p.toFile().getName()));
    } catch (IOException e) {
        e.printStackTrace();
    }

    return archives;
}

From source file:org.artifactory.support.core.compression.CompressionServiceImpl.java

/**
 * Performs cleanup
 *
 * @param directory content to clean
 */
private void cleanup(File directory) {
    try {
        Files.walk(directory.toPath(), FileVisitOption.FOLLOW_LINKS).filter(Files::isDirectory)
                .filter(p -> !p.equals(directory.toPath()))
                .filter(p -> !FilenameUtils.getExtension(p.toString()).equals("zip")).forEach(p -> {
                    try {
                        FileUtils.deleteDirectory(p.toFile());
                    } catch (IOException e) {
                        log.debug("Cannot delete folder: {}", e);
                    }
                });
    } catch (IOException e) {
        log.debug("Cleanup has failed: {}", e);
    }
}

From source file:org.ballerinalang.composer.service.fs.LocalFileSystem.java

@Override
public void delete(String path) throws IOException {
    Path ioPath = Paths.get(path);
    if (ioPath.toFile().isDirectory()) {
        Files.walk(ioPath, FileVisitOption.FOLLOW_LINKS).sorted(Comparator.reverseOrder()).map(Path::toFile)
                .forEach(File::delete);
    } else {
        Files.delete(ioPath);
    }
}

From source file:org.ballerinalang.composer.service.workspace.local.LocalFSWorkspace.java

@Override
public void delete(String path, String type) throws IOException {
    Path ioPath = Paths.get(path);
    if (FOLDER_TYPE.equals(type)) {
        Files.walk(ioPath, FileVisitOption.FOLLOW_LINKS).sorted(Comparator.reverseOrder()).map(Path::toFile)
                .forEach(File::delete);
    } else {
        Files.delete(ioPath);
    }
}

From source file:org.codice.ddf.commands.catalog.IngestCommand.java

@Override
protected Object executeWithSubject() throws Exception {

    final CatalogFacade catalog = getCatalog();
    final File inputFile = new File(filePath);

    if (!inputFile.exists()) {
        printErrorMessage("File or directory [" + filePath + "] must exist.");
        console.println("If the file does indeed exist, try putting the path in quotes.");
        return null;
    }

    if (deprecatedBatchSize != DEFAULT_BATCH_SIZE) {
        // user specified the old style batch size, so use that
        printErrorMessage(
                "Batch size positional argument is DEPRECATED, please use --batchsize option instead.");
        batchSize = deprecatedBatchSize;
    }

    if (batchSize <= 0) {
        printErrorMessage(
                "A batch size of [" + batchSize + "] was supplied. Batch size must be greater than 0.");
        return null;
    }

    if (!StringUtils.isEmpty(failedDir)) {
        failedIngestDirectory = new File(failedDir);
        if (!verifyFailedIngestDirectory()) {
            return null;
        }

        /**
         * Batch size is always set to 1 when using an Ingest Failure Directory.  If a batch size is specified by the user, issue
         * a warning stating that a batch size of 1 will be used.
         */
        if (batchSize != DEFAULT_BATCH_SIZE) {
            console.println("WARNING: An ingest failure directory was supplied in addition to a batch size of "
                    + batchSize
                    + ". When using an ingest failure directory, the batch size must be 1. Setting batch size to 1.");
        }

        batchSize = 1;
    }

    BundleContext bundleContext = getBundleContext();
    if (!DEFAULT_TRANSFORMER_ID.equals(transformerId)) {
        ServiceReference[] refs = null;

        try {
            refs = bundleContext.getServiceReferences(InputTransformer.class.getName(),
                    "(|" + "(" + Constants.SERVICE_ID + "=" + transformerId + ")" + ")");
        } catch (InvalidSyntaxException e) {
            throw new IllegalArgumentException("Invalid transformer transformerId: " + transformerId, e);
        }

        if (refs == null || refs.length == 0) {
            throw new IllegalArgumentException("Transformer " + transformerId + " not found");
        } else {
            transformer = (InputTransformer) bundleContext.getService(refs[0]);
        }
    }

    Stream<Path> ingestStream = Files.walk(inputFile.toPath(), FileVisitOption.FOLLOW_LINKS);

    int totalFiles = (inputFile.isDirectory()) ? inputFile.list().length : 1;
    fileCount.getAndSet(totalFiles);

    final ArrayBlockingQueue<Metacard> metacardQueue = new ArrayBlockingQueue<>(batchSize * multithreaded);

    ExecutorService queueExecutor = Executors.newSingleThreadExecutor();

    final long start = System.currentTimeMillis();

    printProgressAndFlush(start, fileCount.get(), 0);

    queueExecutor.submit(() -> buildQueue(ingestStream, metacardQueue, start));

    final ScheduledExecutorService batchScheduler = Executors.newSingleThreadScheduledExecutor();

    BlockingQueue<Runnable> blockingQueue = new ArrayBlockingQueue<>(multithreaded);
    RejectedExecutionHandler rejectedExecutionHandler = new ThreadPoolExecutor.CallerRunsPolicy();
    ExecutorService executorService = new ThreadPoolExecutor(multithreaded, multithreaded, 0L,
            TimeUnit.MILLISECONDS, blockingQueue, rejectedExecutionHandler);

    submitToCatalog(batchScheduler, executorService, metacardQueue, catalog, start);

    while (!doneBuildingQueue.get() || processingThreads.get() != 0) {
        try {
            TimeUnit.SECONDS.sleep(2);
        } catch (InterruptedException e) {
            LOGGER.error("Ingest 'Waiting for processing to finish' thread interrupted: {}", e);
        }
    }

    try {
        queueExecutor.shutdown();
        executorService.shutdown();
        batchScheduler.shutdown();
    } catch (SecurityException e) {
        LOGGER.error("Executor service shutdown was not permitted: {}", e);
    }

    printProgressAndFlush(start, fileCount.get(), ingestCount.get() + ignoreCount.get());
    long end = System.currentTimeMillis();
    console.println();
    String elapsedTime = timeFormatter.print(new Period(start, end).withMillis(0));

    console.println();
    console.printf(" %d file(s) ingested in %s %n", ingestCount.get(), elapsedTime);

    LOGGER.info("{} file(s) ingested in {} [{} records/sec]", ingestCount.get(), elapsedTime,
            calculateRecordsPerSecond(ingestCount.get(), start, end));
    INGEST_LOGGER.info("{} file(s) ingested in {} [{} records/sec]", ingestCount.get(), elapsedTime,
            calculateRecordsPerSecond(ingestCount.get(), start, end));

    if (fileCount.get() != ingestCount.get()) {
        console.println();
        if ((fileCount.get() - ingestCount.get() - ignoreCount.get()) >= 1) {
            String failedAmount = Integer.toString(fileCount.get() - ingestCount.get() - ignoreCount.get());
            printErrorMessage(
                    failedAmount + " file(s) failed to be ingested.  See the ingest log for more details.");
            INGEST_LOGGER.warn("{} files(s) failed to be ingested.", failedAmount);
        }
        if (ignoreList != null) {
            String ignoredAmount = Integer.toString(ignoreCount.get());
            printColor(Ansi.Color.YELLOW,
                    ignoredAmount + " file(s) ignored.  See the ingest log for more details.");
            INGEST_LOGGER.warn("{} files(s) were ignored.", ignoredAmount);
        }
    }
    console.println();

    return null;
}

From source file:org.craftercms.studio.impl.v1.repository.disk.DiskContentRepository.java

/**
 * get immediate children for path
 * @param path path to content
 */
public RepositoryItem[] getContentChildren(String path) {
    final List<RepositoryItem> retItems = new ArrayList<RepositoryItem>();

    try {
        EnumSet<FileVisitOption> opts = EnumSet.of(FileVisitOption.FOLLOW_LINKS);
        final String finalPath = path;
        Files.walkFileTree(constructRepoPath(finalPath), opts, 1, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path visitPath, BasicFileAttributes attrs) throws IOException {

                if (!visitPath.equals(constructRepoPath(finalPath))) {
                    RepositoryItem item = new RepositoryItem();
                    item.name = visitPath.toFile().getName();

                    String visitFolderPath = visitPath.toString();//.replace("/index.xml", "");
                    //Path visitFolder = constructRepoPath(visitFolderPath);
                    item.isFolder = visitPath.toFile().isDirectory();
                    int lastIdx = visitFolderPath.lastIndexOf(File.separator + item.name);
                    if (lastIdx > 0) {
                        item.path = visitFolderPath.substring(0, lastIdx);
                    }
                    //item.path = visitFolderPath.replace("/" + item.name, "");
                    item.path = item.path.replace(getRootPath().replace("/", File.separator), "");
                    item.path = item.path.replace(File.separator + ".xml", "");
                    item.path = item.path.replace(File.separator, "/");

                    if (!".DS_Store".equals(item.name)) {
                        logger.debug("ITEM NAME: {0}", item.name);
                        logger.debug("ITEM PATH: {0}", item.path);
                        logger.debug("ITEM FOLDER: ({0}): {1}", visitFolderPath, item.isFolder);
                        retItems.add(item);
                    }
                }

                return FileVisitResult.CONTINUE;
            }
        });
    } catch (Exception err) {
        // log this error
    }

    RepositoryItem[] items = new RepositoryItem[retItems.size()];
    items = retItems.toArray(items);
    return items;
}

From source file:org.craftercms.studio.impl.v1.repository.disk.DiskContentRepository.java

/**
 * get the version history for an item
 * @param path - the path of the item
 */
public VersionTO[] getContentVersionHistory(String path) {
    final List<VersionTO> versionList = new ArrayList<VersionTO>();

    try {
        final String pathToContent = path.substring(0, path.lastIndexOf(File.separator));
        final String filename = path.substring(path.lastIndexOf(File.separator) + 1);

        Path versionPath = constructVersionRepoPath(pathToContent);

        EnumSet<FileVisitOption> opts = EnumSet.of(FileVisitOption.FOLLOW_LINKS);

        Files.walkFileTree(versionPath, opts, 1, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path visitPath, BasicFileAttributes attrs) throws IOException {
                String versionFilename = visitPath.toString();

                if (versionFilename.contains(filename)) {
                    VersionTO version = new VersionTO();
                    String label = versionFilename.substring(versionFilename.lastIndexOf("--") + 2);

                    BasicFileAttributes attr = Files.readAttributes(visitPath, BasicFileAttributes.class);

                    version.setVersionNumber(label);
                    version.setLastModifier("ADMIN");
                    version.setLastModifiedDate(new Date(attr.lastModifiedTime().toMillis()));
                    version.setComment("");

                    versionList.add(version);
                }
                return FileVisitResult.CONTINUE;
            }
        });
    } catch (Exception err) {
        logger.error("error while getting history for content item " + path);
        logger.debug("error while getting history for content item " + path, err);
    }
    final List<VersionTO> finalVersionList = new ArrayList<VersionTO>();
    if (versionList.size() > 0) {
        Collections.sort(versionList);
        VersionTO latest = versionList.get(versionList.size() - 1);
        String latestVersionLabel = latest.getVersionNumber();
        int temp = latestVersionLabel.indexOf(".");
        String currentMajorVersion = latestVersionLabel.substring(0, temp);

        for (int i = versionList.size(); i > 0; i--) {
            VersionTO v = versionList.get(i - 1);
            String versionId = v.getVersionNumber();
            boolean condition = !versionId.startsWith(currentMajorVersion) && !versionId.endsWith(".0");
            if (condition)
                continue;
            finalVersionList.add(v);
        }
    }
    //Collections.reverse(versionList);
    VersionTO[] versions = new VersionTO[finalVersionList.size()];
    versions = finalVersionList.toArray(versions);
    return versions;
}