List of usage examples for org.apache.commons.vfs2 FileObject.exists()
boolean exists() throws FileSystemException;
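Before the full project examples below, here is a minimal, self-contained sketch of the call. The local path, the use of the default VFS manager, and the ExistsExample class name are illustrative assumptions, not taken from the projects listed.

import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileSystemManager;
import org.apache.commons.vfs2.VFS;

public class ExistsExample {
    public static void main(String[] args) throws FileSystemException {
        // Resolve a file through the default VFS manager; the path is a placeholder.
        FileSystemManager manager = VFS.getManager();
        FileObject file = manager.resolveFile("file:///tmp/example.txt");

        // exists() returns true only if the resolved object is actually present
        // on the underlying file system; resolveFile() itself succeeds even for
        // names that do not (yet) exist.
        if (file.exists()) {
            System.out.println("Found: " + file.getName().getBaseName());
        } else {
            System.out.println("Missing: " + file.getName().getURI());
        }
    }
}

In practice the check is usually combined with isReadable(), isWriteable(), or getType(), as the examples below show.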
From source file:org.ow2.proactive_grid_cloud_portal.scheduler.SchedulerStateRest.java
/**
 * Either pulls a file from the given DataSpace to the local file system, or lists the content of
 * a directory if the path refers to a directory. When the path points to a file, the content of
 * that file is returned as an input stream. When the path points to a directory, the returned
 * input stream is a text stream containing one directory entry per line.
 *
 * @param sessionId a valid session id
 * @param spaceName the name of the data space involved (GLOBAL or USER)
 * @param filePath  the path to the file or directory whose content must be received
 **/
@Override
public InputStream pullFile(@HeaderParam("sessionid") String sessionId, @PathParam("spaceName") String spaceName,
        @PathParam("filePath") String filePath) throws IOException, NotConnectedRestException, PermissionRestException {

    checkAccess(sessionId, "pullFile");
    Session session = dataspaceRestApi.checkSessionValidity(sessionId);
    filePath = normalizeFilePath(filePath, null);

    FileObject sourcefo = dataspaceRestApi.resolveFile(session, spaceName, filePath);
    if (!sourcefo.exists() || !sourcefo.isReadable()) {
        RuntimeException ex = new IllegalArgumentException(
                "File " + filePath + " does not exist or is not readable in space " + spaceName);
        logger.error(ex);
        throw ex;
    }

    if (sourcefo.getType().equals(FileType.FOLDER)) {
        logger.info("[pullFile] reading directory content from " + sourcefo.getURL());
        // if it's a folder we return an InputStream listing its content
        StringBuilder sb = new StringBuilder();
        String nl = System.lineSeparator();
        for (FileObject fo : sourcefo.getChildren()) {
            sb.append(fo.getName().getBaseName() + nl);
        }
        return IOUtils.toInputStream(sb.toString());
    } else if (sourcefo.getType().equals(FileType.FILE)) {
        logger.info("[pullFile] reading file content from " + sourcefo.getURL());
        return sourcefo.getContent().getInputStream();
    } else {
        RuntimeException ex = new IllegalArgumentException(
                "File " + filePath + " has an unsupported type " + sourcefo.getType());
        logger.error(ex);
        throw ex;
    }
}
From source file:org.ow2.proactive_grid_cloud_portal.scheduler.SchedulerStateRest.java
/**
 * Deletes a file, or recursively deletes a directory, from the given DataSpace.
 *
 * @param sessionId a valid session id
 * @param spaceName the name of the data space involved (GLOBAL or USER)
 * @param filePath  the path to the file or directory which must be deleted
 **/
@Override
public boolean deleteFile(@HeaderParam("sessionid") String sessionId, @PathParam("spaceName") String spaceName,
        @PathParam("filePath") String filePath) throws IOException, NotConnectedRestException, PermissionRestException {

    checkAccess(sessionId, "deleteFile");
    Session session = dataspaceRestApi.checkSessionValidity(sessionId);
    filePath = normalizeFilePath(filePath, null);

    FileObject sourcefo = dataspaceRestApi.resolveFile(session, spaceName, filePath);
    if (!sourcefo.exists() || !sourcefo.isWriteable()) {
        RuntimeException ex = new IllegalArgumentException(
                "File or Folder " + filePath + " does not exist or is not writable in space " + spaceName);
        logger.error(ex);
        throw ex;
    }

    if (sourcefo.getType().equals(FileType.FILE)) {
        logger.info("[deleteFile] deleting file " + sourcefo.getURL());
        sourcefo.delete();
    } else if (sourcefo.getType().equals(FileType.FOLDER)) {
        logger.info("[deleteFile] deleting folder (and all its descendants) " + sourcefo.getURL());
        sourcefo.delete(Selectors.SELECT_ALL);
    } else {
        RuntimeException ex = new IllegalArgumentException(
                "File " + filePath + " has an unsupported type " + sourcefo.getType());
        logger.error(ex);
        throw ex;
    }
    return true;
}
From source file:org.pentaho.di.core.hadoop.HadoopConfigurationBootstrap.java
public synchronized List<HadoopConfigurationInfo> getHadoopConfigurationInfos()
        throws KettleException, ConfigurationException, IOException {
    List<HadoopConfigurationInfo> result = new ArrayList<>();
    FileObject hadoopConfigurationsDir = resolveHadoopConfigurationsDirectory();
    // If the folder doesn't exist, return an empty list
    if (hadoopConfigurationsDir.exists()) {
        String activeId = getActiveConfigurationId();
        String willBeActiveId = getWillBeActiveConfigurationId();
        for (FileObject childFolder : hadoopConfigurationsDir.getChildren()) {
            if (childFolder.getType() == FileType.FOLDER) {
                String id = childFolder.getName().getBaseName();
                FileObject configPropertiesFile = childFolder.getChild(CONFIG_PROPERTIES);
                if (configPropertiesFile.exists()) {
                    Properties properties = new Properties();
                    properties.load(configPropertiesFile.getContent().getInputStream());
                    result.add(new HadoopConfigurationInfo(id, properties.getProperty("name", id),
                            id.equals(activeId), willBeActiveId.equals(id)));
                }
            }
        }
    }
    return result;
}
From source file:org.pentaho.di.core.hadoop.HadoopConfigurationBootstrap.java
/**
 * Find the location of the big data plugin. This relies on the Hadoop Job Executor job entry
 * existing within the big data plugin.
 *
 * @return The VFS location of the big data plugin
 * @throws ConfigurationException
 */
public FileObject locatePluginDirectory() throws ConfigurationException {
    FileObject dir = null;
    boolean exists = false;
    try {
        dir = KettleVFS.getFileObject(getPluginInterface().getPluginDirectory().toExternalForm());
        exists = dir.exists();
    } catch (Exception e) {
        throw new ConfigurationException(
                BaseMessages.getString(PKG, "HadoopConfigurationBootstrap.PluginDirectoryNotFound"), e);
    }
    if (!exists) {
        throw new ConfigurationException(
                BaseMessages.getString(PKG, "HadoopConfigurationBootstrap.PluginDirectoryNotFound"));
    }
    return dir;
}
From source file:org.pentaho.di.core.util.CurrentDirectoryResolver.java
public VariableSpace resolveCurrentDirectory(VariableSpace parentVariables,
        RepositoryDirectoryInterface directory, String filename) {
    Variables tmpSpace = new Variables();
    tmpSpace.setParentVariableSpace(parentVariables);
    tmpSpace.initializeVariablesFrom(parentVariables);

    if (directory != null) {
        tmpSpace.setVariable(Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY, directory.toString());
        tmpSpace.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, directory.toString());
        tmpSpace.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, directory.toString());
    } else if (filename != null) {
        try {
            FileObject fileObject = KettleVFS.getFileObject(filename, tmpSpace);

            if (!fileObject.exists()) {
                // don't set variables if the file doesn't exist
                return tmpSpace;
            }

            FileName fileName = fileObject.getName();

            // The filename of the transformation
            tmpSpace.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, fileName.getBaseName());

            // The directory of the transformation
            FileName fileDir = fileName.getParent();
            tmpSpace.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, fileDir.getURI());
            tmpSpace.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, fileDir.getURI());
            tmpSpace.setVariable(Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY, fileDir.getURI());
        } catch (Exception e) {
            // Ignore: if the file cannot be resolved, the directory variables are simply not set
        }
    }
    return tmpSpace;
}
From source file:org.pentaho.di.job.entries.checkdbconnection.JobEntryCheckDbConnectionsIT.java
@After
public void cleanup() {
    try {
        FileObject dbFile = KettleVFS.getFileObject(H2_DATABASE + ".h2.db");
        if (dbFile.exists()) {
            System.out.println("deleting file");
            dbFile.delete();
        }
    } catch (KettleFileException | FileSystemException ignored) {
        // Ignore, we tried cleaning up
    }
}
From source file:org.pentaho.di.job.entries.ftpsget.JobEntryFTPSGetIT.java
@Test
public void downloadFile_WhenDestinationIsSetViaVariable() throws Exception {
    final String myVar = "my-var";
    final String expectedDownloadedFilePath = ramDir + "/" + FtpsServer.SAMPLE_FILE;

    JobEntryFTPSGet job = createCommonJob();
    job.setVariable(myVar, ramDir);
    job.setTargetDirectory(String.format("${%s}", myVar));

    FileObject downloaded = KettleVFS.getFileObject(expectedDownloadedFilePath);
    assertFalse(downloaded.exists());

    try {
        job.execute(new Result(), 1);
        downloaded = KettleVFS.getFileObject(expectedDownloadedFilePath);
        assertTrue(downloaded.exists());
    } finally {
        downloaded.delete();
    }
}
From source file:org.pentaho.di.job.entries.ftpsget.JobEntryFTPSGetIT.java
@Test
public void downloadFile_WhenDestinationIsSetDirectly() throws Exception {
    JobEntryFTPSGet job = createCommonJob();
    job.setTargetDirectory(ramDir);

    FileObject downloaded = KettleVFS.getFileObject(ramDir + "/" + FtpsServer.SAMPLE_FILE);
    assertFalse(downloaded.exists());

    try {
        job.execute(new Result(), 1);
        downloaded = KettleVFS.getFileObject(ramDir + "/" + FtpsServer.SAMPLE_FILE);
        assertTrue(downloaded.exists());
    } finally {
        downloaded.delete();
    }
}
From source file:org.pentaho.di.job.entries.googledrive.JobEntryGoogleDriveExport.java
protected static void exportFile(Drive driveService, File driveFile, FileObject targetFile,
        GoogleDriveExportFormat exportMapping) throws KettleException {
    Exception savedException = null;
    if (exportMapping != null) {
        FileObject tempFile = KettleVFS.createTempFile(JobEntryGoogleDriveExport.class.getSimpleName(), ".tmp",
                System.getProperty("java.io.tmpdir"));
        try {
            OutputStream fos = tempFile.getContent().getOutputStream();
            BufferedOutputStream bos = new BufferedOutputStream(fos);
            try {
                driveService.files().export(driveFile.getId(), exportMapping.getMimeType())
                        .executeMediaAndDownloadTo(bos);
            } catch (IOException e) {
                // Throw this later, we want to close the output stream first
                savedException = new KettleException(
                        BaseMessages.getString(PKG, "GoogleDriveExport.Error.ExportingFile"), e);
            }
            try {
                bos.close();
            } catch (IOException ignore) {
                // Ignore
            }
            try {
                fos.close();
            } catch (IOException ignore) {
                // Ignore
            }
        } catch (IOException e) {
            savedException = new KettleException(
                    BaseMessages.getString(PKG, "GoogleDriveExport.Error.ExportingFile"), e);
        }
        if (tempFile != null) {
            try {
                targetFile.copyFrom(tempFile, Selectors.SELECT_SELF);
            } catch (FileSystemException e) {
                savedException = new KettleException(
                        BaseMessages.getString(PKG, "GoogleDriveExport.Error.MovingFileFromTemp"), e);
            }
        }
        if (savedException != null) {
            try {
                if (targetFile.exists()) {
                    targetFile.delete();
                }
            } catch (FileSystemException ignore) {
                // Ignore, couldn't delete a bad output file
            }
            throw new KettleException(savedException);
        }
    }
}
From source file:org.pentaho.di.job.entries.googledrive.JobEntryGoogleDriveExport.java
protected void checkFolderExists(FileObject folder, boolean createIfNot) throws KettleException {
    if (folder == null) {
        return;
    }
    try {
        if (!folder.exists() && createIfNot) {
            if (log.isDetailed()) {
                log.logDetailed(BaseMessages.getString(PKG, "GoogleDriveExport.Log.CreatingTargetFolder"));
            }
            folder.createFolder();
            return;
        } else {
            if (!folder.exists()) {
                throw new KettleException(
                        BaseMessages.getString(PKG, "GoogleDriveExport.Error.FolderNotExist", folder.getName()));
            } else if (!folder.isFolder()) {
                throw new KettleException(
                        BaseMessages.getString(PKG, "GoogleDriveExport.Error.NotAFolder", folder.getName()));
            }
        }
    } catch (FileSystemException e) {
        throw new KettleException(e);
    }
}