Example usage for org.apache.commons.vfs2 FileObject exists

Introduction

This page shows example usages of org.apache.commons.vfs2 FileObject.exists(), collected from open source projects.

Prototype

boolean exists() throws FileSystemException;

Document

Determines if this file exists.
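
As the javadoc says, exists() reports whether the file can currently be found and throws FileSystemException if that cannot be determined. Before the project examples below, here is a minimal, self-contained sketch of the call; the class name and path are hypothetical:

import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.VFS;

public class ExistsExample {
    public static void main(String[] args) throws FileSystemException {
        // Resolve a file through the default VFS manager (hypothetical path)
        FileObject file = VFS.getManager().resolveFile("file:///tmp/example.txt");
        if (file.exists()) {
            System.out.println("Present: " + file.getName().getURI());
        } else {
            // Create an empty file; exists() should return true afterwards
            file.createFile();
            System.out.println("Created, exists now: " + file.exists());
        }
    }
}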

Usage

From source file:org.pentaho.di.ui.spoon.SharedObjectSyncUtilTest.java

@After
public void tearDown() throws Exception {
    FileObject sharedObjectsFile = KettleVFS.getFileObject(SHARED_OBJECTS_FILE);
    if (sharedObjectsFile.exists()) {
        sharedObjectsFile.delete();
    }
}

From source file:org.pentaho.googledrive.vfs.test.GoogleDriveFileObjectTest.java

@Test
public void testFileObject() throws Exception {
    FileSystemManager manager = mock(FileSystemManager.class);
    GoogleDriveFileObject fileObjectMock = mock(GoogleDriveFileObject.class);
    when(manager.resolveFile(FOLDER)).thenReturn(fileObjectMock);
    when(fileObjectMock.isFolder()).thenReturn(true);
    when(fileObjectMock.exists()).thenReturn(true);
    when(fileObjectMock.delete()).thenReturn(true);
    FileObject fileObject = manager.resolveFile(FOLDER);
    fileObject.createFolder();
    assertTrue(fileObject.isFolder());
    assertTrue(fileObject.exists());
    assertTrue(fileObject.delete());
    assertNull(fileObject.getChildren());
}

From source file:org.pentaho.hadoop.PluginPropertiesUtil.java

/**
 * Loads a properties file from the plugin directory for the plugin interface provided
 *
 * @param plugin       plugin whose directory contains the properties file; must not be null
 * @param relativeName name of the properties file, relative to the plugin directory
 * @return the loaded properties
 * @throws KettleFileException
 * @throws IOException
 */
protected Properties loadProperties(PluginInterface plugin, String relativeName)
        throws KettleFileException, IOException {
    if (plugin == null) {
        throw new NullPointerException();
    }
    FileObject propFile = KettleVFS
            .getFileObject(plugin.getPluginDirectory().getPath() + Const.FILE_SEPARATOR + relativeName);
    if (!propFile.exists()) {
        throw new FileNotFoundException(propFile.toString());
    }
    try {
        return new PropertiesConfigurationProperties(propFile);
    } catch (Exception e) {
        // Do not catch ConfigurationException. Different shims will use different
        // packages for this exception.
        throw new IOException(e);
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImpl.java

/**
 * Stages the source file or folder to a Hadoop file system and sets their permission and replication value
 * appropriately to be used with the Distributed Cache. WARNING: This will delete the contents of dest before staging
 * the archive.
 *
 * @param source    File or folder to copy to the file system. If it is a folder all contents will be copied into
 *                  dest.
 * @param fs        Hadoop file system to store the contents of the archive in
 * @param dest      Destination to copy source into. If source is a file, the new file name will be exactly dest. If
 *                  source is a folder its contents will be copied into dest. For more info see {@link
 *                  FileSystem#copyFromLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path)}.
 * @param overwrite Should an existing file or folder be overwritten? If not, an exception will be thrown.
 * @throws IOException         if the destination exists and is not a directory
 * @throws KettleFileException if the source does not exist, or if the destination exists and overwrite is false
 */
public void stageForCache(FileObject source, FileSystem fs, Path dest, boolean overwrite, boolean isPublic)
        throws IOException, KettleFileException {
    if (!source.exists()) {
        throw new KettleFileException(BaseMessages.getString(DistributedCacheUtilImpl.class,
                "DistributedCacheUtil.SourceDoesNotExist", source));
    }

    if (fs.exists(dest)) {
        if (overwrite) {
            // It is a directory, clear it out
            fs.delete(dest, true);
        } else {
            throw new KettleFileException(BaseMessages.getString(DistributedCacheUtilImpl.class,
                    "DistributedCacheUtil.DestinationExists", dest.toUri().getPath()));
        }
    }

    // Use the same replication we'd use for submitting jobs
    short replication = (short) fs.getConf().getInt("mapred.submit.replication", 10);

    if (source.getURL().toString().endsWith(CONFIG_PROPERTIES)) {
        copyConfigProperties(source, fs, dest);
    } else {
        Path local = new Path(source.getURL().getPath());
        fs.copyFromLocalFile(local, dest);
    }

    if (isPublic) {
        fs.setPermission(dest, PUBLIC_CACHED_FILE_PERMISSION);
    } else {
        fs.setPermission(dest, CACHED_FILE_PERMISSION);
    }
    fs.setReplication(dest, replication);
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImpl.java

/**
 * Delete a directory and all of its contents
 *
 * @param dir Directory to delete
 * @return True if the directory was deleted successfully
 */
public boolean deleteDirectory(FileObject dir) throws FileSystemException {
    dir.delete(new AllFileSelector());
    return !dir.exists();
}
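
Note the idiom here: FileObject.delete(FileSelector) returns the number of files deleted rather than a success flag, so the trailing exists() call is what actually verifies the directory is gone. A standalone sketch of the same pattern, with a hypothetical class name and path:

import org.apache.commons.vfs2.AllFileSelector;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.VFS;

public class DeleteDirectoryExample {
    public static void main(String[] args) throws FileSystemException {
        // Hypothetical directory to remove
        FileObject dir = VFS.getManager().resolveFile("file:///tmp/vfs-delete-demo");
        // delete(FileSelector) returns the count of deleted files, not success/failure
        int deleted = dir.delete(new AllFileSelector());
        // exists() is the authoritative post-condition check
        System.out.println("Deleted " + deleted + " entries; still exists: " + dir.exists());
    }
}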

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImpl.java

/**
 * Extract a zip archive to a directory.
 *
 * @param archive Zip archive to extract
 * @param dest    Destination directory. This must not exist!
 * @return Directory the zip was extracted into
 * @throws IllegalArgumentException when the archive file does not exist or the destination directory already exists
 * @throws IOException
 * @throws KettleFileException
 */
public FileObject extract(FileObject archive, FileObject dest) throws IOException, KettleFileException {
    if (!archive.exists()) {
        throw new IllegalArgumentException("archive does not exist: " + archive.getURL().getPath());
    }

    if (dest.exists()) {
        throw new IllegalArgumentException("destination already exists");
    }
    dest.createFolder();

    try {
        byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
        int len = 0;
        ZipInputStream zis = new ZipInputStream(archive.getContent().getInputStream());
        try {
            ZipEntry ze;
            while ((ze = zis.getNextEntry()) != null) {
                FileObject entry = KettleVFS.getFileObject(dest + Const.FILE_SEPARATOR + ze.getName());
                FileObject parent = entry.getParent();
                if (parent != null) {
                    parent.createFolder();
                }
                if (ze.isDirectory()) {
                    entry.createFolder();
                    continue;
                }

                OutputStream os = KettleVFS.getOutputStream(entry, false);
                try {
                    while ((len = zis.read(buffer)) > 0) {
                        os.write(buffer, 0, len);
                    }
                } finally {
                    if (os != null) {
                        os.close();
                    }
                }
            }
        } finally {
            if (zis != null) {
                zis.close();
            }
        }
    } catch (Exception ex) {
        // Try to clean up the temp directory and all files
        if (!deleteDirectory(dest)) {
            throw new KettleFileException("Could not clean up temp dir after error extracting", ex);
        }
        throw new KettleFileException("error extracting archive", ex);
    }

    return dest;
}
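
The two exists() guards at the top of extract() encode its contract: the archive must exist and the destination must not. A hedged usage fragment, with hypothetical paths and the utility constructed as in the tests further below:

// Hypothetical paths; extract() throws IllegalArgumentException if violated
FileObject archive = KettleVFS.getFileObject("/tmp/pentaho-mapreduce-sample.jar");
FileObject dest = KettleVFS.getFileObject("/tmp/pmr-extracted"); // must not exist yet
if (archive.exists() && !dest.exists()) {
    DistributedCacheUtilImpl util = new DistributedCacheUtilImpl(TEST_CONFIG); // as in the tests below
    FileObject extractedRoot = util.extract(archive, dest); // returns dest on success
}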

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImpl.java

/**
 * Attempts to find a plugin's installation folder on disk within all known plugin folder locations
 *
 * @param pluginFolderName Name of plugin folder
 * @return Tuple of [(FileObject) Location of the first plugin folder found as a direct descendant of one of the known
 * plugin folder locations, (String) Relative path from parent]
 * @throws KettleFileException Error getting plugin folders
 */
protected Object[] findPluginFolder(final String pluginFolderName) throws KettleFileException {
    List<PluginFolderInterface> pluginFolders = PluginFolder.populateFolders(null);
    if (pluginFolders != null) {
        for (PluginFolderInterface pluginFolder : pluginFolders) {
            FileObject folder = KettleVFS.getFileObject(pluginFolder.getFolder());

            try {
                if (folder.exists()) {
                    FileObject[] files = folder.findFiles(new FileSelector() {
                        @Override
                        public boolean includeFile(FileSelectInfo fileSelectInfo) throws Exception {
                            if (fileSelectInfo.getFile().equals(fileSelectInfo.getBaseFolder())) {
                                // Do not consider the base folders
                                return false;
                            }
                            // Determine relative name to compare
                            int baseNameLength = fileSelectInfo.getBaseFolder().getName().getPath().length()
                                    + 1;
                            String relativeName = fileSelectInfo.getFile().getName().getPath()
                                    .substring(baseNameLength);
                            // Compare plugin folder name with the relative name
                            return pluginFolderName.equals(relativeName);
                        }

                        @Override
                        public boolean traverseDescendents(FileSelectInfo fileSelectInfo) throws Exception {
                            return true;
                        }
                    });
                    if (files != null && files.length > 0) {
                        return new Object[] { files[0], folder.getName().getRelativeName(files[0].getName()) }; // Return the first match
                    }
                }
            } catch (FileSystemException ex) {
                throw new KettleFileException("Error searching for folder '" + pluginFolderName + "'", ex);
            }
        }
    }
    return null;
}
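
Since findPluginFolder() returns an untyped Object[] tuple, callers must cast each element. A hypothetical sketch of consuming the result (the folder name is made up):

Object[] result = findPluginFolder("my-plugin-folder"); // hypothetical folder name
if (result != null) {
    FileObject pluginFolder = (FileObject) result[0]; // first matching plugin folder
    String relativePath = (String) result[1];         // path relative to its parent
}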

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplTest.java

@Test
public void deleteDirectory() throws Exception {
    FileObject test = KettleVFS.getFileObject("bin/test/deleteDirectoryTest");
    test.createFolder();

    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);
    ch.deleteDirectory(test);
    try {
        assertFalse(test.exists());
    } finally {
        // Delete the directory with java.io.File if it wasn't removed
        File f = new File("bin/test/deleteDirectoryTest");
        if (f.exists() && !f.delete()) {
            throw new IOException("unable to delete test directory: " + f.getAbsolutePath());
        }
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplTest.java

@Test
public void extractToTemp() throws Exception {
    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);

    FileObject archive = KettleVFS
            .getFileObject(getClass().getResource("/pentaho-mapreduce-sample.jar").toURI().getPath());
    FileObject extracted = ch.extractToTemp(archive);

    assertNotNull(extracted);
    assertTrue(extracted.exists());
    try {
        // 3 files and 5 directories inside the root folder, plus the root folder itself: 9 entries
        assertEquals(9, extracted.findFiles(new AllFileSelector()).length);
    } finally {
        // clean up after ourselves
        ch.deleteDirectory(extracted);
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplTest.java

@Test
public void extractToTempZipEntriesMixed() throws Exception {
    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);

    File dest = File.createTempFile("entriesMixed", ".zip");
    ZipOutputStream outputStream = new ZipOutputStream(new FileOutputStream(dest));
    ZipEntry e = new ZipEntry("zipEntriesMixed" + "/" + "someFile.txt");
    outputStream.putNextEntry(e);
    byte[] data = "someOutString".getBytes();
    outputStream.write(data, 0, data.length);
    outputStream.closeEntry();
    e = new ZipEntry("zipEntriesMixed" + "/");
    outputStream.putNextEntry(e);
    outputStream.closeEntry();
    outputStream.close();

    FileObject archive = KettleVFS.getFileObject(dest.getAbsolutePath());

    FileObject extracted = null;
    try {
        extracted = ch.extractToTemp(archive);
    } catch (IOException | KettleFileException e1) {
        e1.printStackTrace();
        fail("Exception not expected in this case");
    }

    assertNotNull(extracted);
    assertTrue(extracted.exists());
    try {
        // The root folder, the zipEntriesMixed directory, and someFile.txt: 3 entries in total
        assertEquals(3, extracted.findFiles(new AllFileSelector()).length);
    } finally {
        // clean up after ourselves
        ch.deleteDirectory(extracted);
        dest.delete();
    }
}