Example usage for org.apache.commons.vfs2 FileObject findFiles

List of usage examples for org.apache.commons.vfs2 FileObject findFiles

Introduction

On this page you can find an example usage for org.apache.commons.vfs2 FileObject findFiles.

Prototype

FileObject[] findFiles(FileSelector selector) throws FileSystemException;

Source Link

Document

Finds the set of matching descendants of this file, in depthwise order.

Usage

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImpl.java

/**
 * Attempts to find a plugin's installation folder on disk within all known plugin folder locations
 *
 * @param pluginFolderName Name of plugin folder
 * @return Tuple of [(FileObject) Location of the first plugin folder found as a direct descendant of one of the known
 * plugin folder locations, (String) Relative path from parent]
 * @throws KettleFileException Error getting plugin folders
 */
protected Object[] findPluginFolder(final String pluginFolderName) throws KettleFileException {
    List<PluginFolderInterface> knownFolders = PluginFolder.populateFolders(null);
    if (knownFolders == null) {
        return null;
    }
    for (PluginFolderInterface knownFolder : knownFolders) {
        FileObject base = KettleVFS.getFileObject(knownFolder.getFolder());

        try {
            if (!base.exists()) {
                continue;
            }
            FileObject[] matches = base.findFiles(new FileSelector() {
                @Override
                public boolean includeFile(FileSelectInfo fileSelectInfo) throws Exception {
                    // The base folder itself is never a candidate
                    if (fileSelectInfo.getFile().equals(fileSelectInfo.getBaseFolder())) {
                        return false;
                    }
                    // Strip the base folder's path (plus the separator) to get the relative path
                    int prefixLength = fileSelectInfo.getBaseFolder().getName().getPath().length() + 1;
                    String relativePath = fileSelectInfo.getFile().getName().getPath()
                            .substring(prefixLength);
                    // Keep only files whose relative path equals the requested plugin folder name
                    return pluginFolderName.equals(relativePath);
                }

                @Override
                public boolean traverseDescendents(FileSelectInfo fileSelectInfo) throws Exception {
                    return true;
                }
            });
            if (matches != null && matches.length > 0) {
                // Return the first match together with its path relative to the base folder
                return new Object[] { matches[0], base.getName().getRelativeName(matches[0].getName()) };
            }
        } catch (FileSystemException ex) {
            throw new KettleFileException("Error searching for folder '" + pluginFolderName + "'", ex);
        }
    }
    return null;
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplTest.java

@Test
public void extractToTemp() throws Exception {
    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);

    // Extract the bundled sample jar into a temporary folder
    FileObject archive = KettleVFS
            .getFileObject(getClass().getResource("/pentaho-mapreduce-sample.jar").toURI().getPath());
    FileObject extracted = ch.extractToTemp(archive);

    assertNotNull(extracted);
    assertTrue(extracted.exists());
    try {
        // 3 files and 5 directories inside the root folder; the root itself makes the 9th entry
        int entryCount = extracted.findFiles(new AllFileSelector()).length;
        assertTrue(entryCount == 9);
    } finally {
        // Always remove the extraction folder, even when the assertion fails
        ch.deleteDirectory(extracted);
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplTest.java

@Test
public void extractToTempZipEntriesMixed() throws Exception {
    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);

    // Build a zip that mixes a file entry with an explicit directory entry
    File dest = File.createTempFile("entriesMixed", ".zip");
    // try-with-resources guarantees the stream is closed even if a write throws,
    // so the temp file is never left open/locked
    try (ZipOutputStream outputStream = new ZipOutputStream(new FileOutputStream(dest))) {
        ZipEntry e = new ZipEntry("zipEntriesMixed" + "/" + "someFile.txt");
        outputStream.putNextEntry(e);
        byte[] data = "someOutString".getBytes();
        outputStream.write(data, 0, data.length);
        outputStream.closeEntry();
        e = new ZipEntry("zipEntriesMixed" + "/");
        outputStream.putNextEntry(e);
        outputStream.closeEntry();
    }

    FileObject archive = KettleVFS.getFileObject(dest.getAbsolutePath());

    FileObject extracted = null;
    try {
        extracted = ch.extractToTemp(archive);
    } catch (IOException | KettleFileException e1) {
        e1.printStackTrace();
        fail("Exception not expected in this case");
    }

    assertNotNull(extracted);
    assertTrue(extracted.exists());
    try {
        // The archive holds one file inside one directory; with the root folder included,
        // findFiles should report 3 entries (the original comment claiming 9 was a copy-paste error)
        assertTrue(extracted.findFiles(new AllFileSelector()).length == 3);
    } finally {
        // clean up after ourself
        ch.deleteDirectory(extracted);
        dest.delete();
    }
}

From source file:org.pentaho.hadoop.shim.common.format.avro.PentahoAvroInputFormat.java

/**
 * Opens an Avro data stream for the given file, using the explicit schema file when one
 * is supplied, otherwise letting the reader take the schema from the data itself.
 */
private DataFileStream<GenericRecord> createDataFileStream(String schemaFileName, String fileName)
        throws Exception {
    // Choose the datum reader: explicit schema when provided, embedded schema otherwise
    DatumReader<GenericRecord> reader = (schemaFileName != null && schemaFileName.length() > 0)
            ? new GenericDatumReader<GenericRecord>(readAvroSchema(schemaFileName))
            : new GenericDatumReader<GenericRecord>();

    FileObject source = KettleVFS.getFileObject(fileName);
    if (source.isFile()) {
        return new DataFileStream<GenericRecord>(source.getContent().getInputStream(), reader);
    }
    // Not a plain file (presumably a folder): stream the first *.avro file found beneath it
    FileObject[] candidates = source.findFiles(new FileExtensionSelector("avro"));
    if (Utils.isEmpty(candidates)) {
        return null;
    }
    return new DataFileStream<GenericRecord>(candidates[0].getContent().getInputStream(), reader);
}

From source file:org.pentaho.hadoop.shim.HadoopConfigurationLocator.java

/**
 * Attempt to find any Hadoop configuration as a direct descendant of the provided directory.
 *
 * @param baseDir Directory to look for Hadoop configurations in
 * @throws ConfigurationException/*from   ww  w.j av a2  s.c  o m*/
 */
private void findHadoopConfigurations(FileObject baseDir, ActiveHadoopConfigurationLocator activeLocator)
        throws ConfigurationException {
    configurations = new HashMap<String, HadoopConfiguration>();
    try {
        if (!baseDir.exists()) {
            throw new ConfigurationException(BaseMessages.getString(PKG,
                    "Error.HadoopConfigurationDirectoryDoesNotExist", baseDir.getURL()));
        }
        for (FileObject f : baseDir.findFiles(new FileSelector() {
            @Override
            public boolean includeFile(FileSelectInfo info) throws Exception {
                return info.getDepth() == 1 && FileType.FOLDER.equals(info.getFile().getType());
            }

            @Override
            public boolean traverseDescendents(FileSelectInfo info) throws Exception {
                return info.getDepth() == 0;
            }
        })) {
            // Only load the specified configuration (ID should match the basename, we allow case-insensitivity)
            if (f.getName().getBaseName().equalsIgnoreCase(activeLocator.getActiveConfigurationId())) {
                HadoopConfiguration config = loadHadoopConfiguration(f);
                if (config != null) {
                    configurations.put(config.getIdentifier(), config);
                }
            }
        }
    } catch (FileSystemException ex) {
        throw new ConfigurationException(BaseMessages.getString(PKG, "Error.UnableToLoadConfigurations",
                baseDir.getName().getFriendlyURI()), ex);
    }
}

From source file:org.pentaho.hadoop.shim.HadoopConfigurationLocator.java

/**
 * Collects the URLs of all jar files beneath {@code path}, descending at most {@code maxdepth}
 * levels, while skipping any file or directory whose URL ends with one of the given suffixes.
 *
 * @param path     root folder to search
 * @param maxdepth maximum traversal depth
 * @param paths    URL suffixes to exclude from both traversal and results
 * @return URLs of the matching jar files
 * @throws FileSystemException if the folder cannot be searched
 */
private List<URL> findJarsIn(FileObject path, final int maxdepth, final Set<String> paths)
        throws FileSystemException {
    FileObject[] jars = path.findFiles(new FileSelector() {
        /** True when the file's URL ends with one of the excluded suffixes (was duplicated in both callbacks). */
        private boolean isExcluded(FileSelectInfo info) throws FileSystemException {
            String url = info.getFile().getURL().toString();
            for (String suffix : paths) {
                if (url.endsWith(suffix)) {
                    return true;
                }
            }
            return false;
        }

        @Override
        public boolean includeFile(FileSelectInfo info) throws Exception {
            return !isExcluded(info) && info.getFile().getName().getBaseName().endsWith(JAR_EXTENSION);
        }

        @Override
        public boolean traverseDescendents(FileSelectInfo info) throws Exception {
            return !isExcluded(info) && info.getDepth() <= maxdepth;
        }
    });

    List<URL> jarUrls = new ArrayList<URL>();
    for (FileObject jar : jars) {
        jarUrls.add(jar.getURL());
    }
    return jarUrls;
}

From source file:org.pentaho.metaverse.impl.VfsLineageCollector.java

@Override
public List<String> listArtifacts(final String startingDate, final String endingDate)
        throws IllegalArgumentException {
    List<String> artifactPaths = new ArrayList<>();
    try {
        FileObject lineageRoot = KettleVFS.getFileObject(getOutputFolder(), new FileSystemOptions());

        // Day-level folders are filtered by the date range; files are collected one level below each
        FileSelector withinDateRange = new VfsDateRangeFilter(format, startingDate, endingDate);
        FileSelector oneLevelDeep = new FileDepthSelector(1, 256);

        if (lineageRoot.exists() && lineageRoot.getType() == FileType.FOLDER) {
            for (FileObject dayFolder : lineageRoot.findFiles(withinDateRange)) {
                for (FileObject entry : dayFolder.findFiles(oneLevelDeep)) {
                    if (entry.getType() == FileType.FILE) {
                        artifactPaths.add(entry.getName().getPath());
                    }
                }
            }
        }
        return artifactPaths;
    } catch (Exception e) {
        throw new IllegalArgumentException(e);
    }
}

From source file:org.pentaho.metaverse.impl.VfsLineageCollector.java

@Override
public List<String> listArtifactsForFile(String pathToArtifact, String startingDate, String endingDate)
        throws IllegalArgumentException {
    List<String> artifactPaths = new ArrayList<>();

    try {
        FileObject lineageRoot = KettleVFS.getFileObject(getOutputFolder(), new FileSystemOptions());

        // Day-level folders are filtered by the date range; artifacts are resolved inside each entry
        FileSelector withinDateRange = new VfsDateRangeFilter(format, startingDate, endingDate);
        FileSelector oneLevelDeep = new FileDepthSelector(1, 256);

        if (lineageRoot.exists() && lineageRoot.getType() == FileType.FOLDER) {
            for (FileObject dayFolder : lineageRoot.findFiles(withinDateRange)) {
                for (FileObject entry : dayFolder.findFiles(oneLevelDeep)) {
                    // Resolve the requested artifact path relative to this entry
                    FileObject requested = entry.resolveFile(pathToArtifact);
                    if (requested.exists() && requested.getType() == FileType.FOLDER) {
                        for (FileObject child : requested.getChildren()) {
                            if (child.getType() == FileType.FILE) {
                                artifactPaths.add(child.getName().getPath());
                            }
                        }
                    }
                }
            }
        }
        return artifactPaths;
    } catch (Exception e) {
        throw new IllegalArgumentException(e);
    }
}

From source file:sf.net.experimaestro.manager.js.JSFileObject.java

@JSFunction
@JSHelp("Find all the matching files within this folder")
public JSJson find_matching_files(
        @JSArgument(name = "regexp", type = "String", help = "The regular expression") String regexp)
        throws FileSystemException {
    final Pattern pattern = Pattern.compile(regexp);
    // Keep only descendants whose base name matches the supplied regular expression
    final FileObject[] matches = file.findFiles(new FileFilterSelector(fileSelectInfo -> {
        LOGGER.info("Looking at %s", fileSelectInfo.getFile().getName());
        return pattern.matcher(fileSelectInfo.getFile().getName().getBaseName()).matches();
    }));
    final JsonArray array = new JsonArray();
    for (FileObject match : matches) {
        array.add(new JsonFileObject(match));
    }
    return new JSJson(array);
}

From source file:sf.net.experimaestro.manager.js.ScriptTest.java

/**
 * Retrieves all the .js files (excluding .inc.js)
 *
 * @return one factory argument per matching script file
 * @throws IOException if the script directory cannot be read
 */
@Factory
public Object[] jsFactories() throws IOException {
    XPMEnvironment environment = new XPMEnvironment();

    // When set, restricts the run to a single named test file
    final String testFile = System.getProperty(JS_TEST_FILE_KEY);

    // Locate the JavaScript scripts directory on the classpath
    final URL url = ScriptTest.class.getResource(JS_SCRIPT_PATH);
    FileObject dir = VFS.getManager().resolveFile(url.toExternalForm());
    FileObject[] scripts = dir.findFiles(new FileSelector() {
        @Override
        public boolean traverseDescendents(FileSelectInfo info) throws Exception {
            return true;
        }

        @Override
        public boolean includeFile(FileSelectInfo file) throws Exception {
            final String name = file.getFile().getName().getBaseName();
            if (testFile != null) {
                return name.equals(testFile);
            }
            // All .js scripts, except include files
            return name.endsWith(".js") && !name.endsWith(".inc.js");
        }
    });

    Object[] factories = new Object[scripts.length];
    for (int i = 0; i < scripts.length; i++) {
        factories[i] = new JavaScriptChecker(environment, scripts[i]);
    }

    return factories;
}