List of usage examples for org.apache.commons.vfs2 FileObject.getName()
Method signature: FileName getName();
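Before the project-specific examples below, here is a minimal, self-contained sketch of the FileName accessors those examples rely on (getBaseName, getExtension, getPath, getURI, getFriendlyURI, getParent). The file path used here is only a placeholder; any URI resolvable by the local VFS manager works.

import org.apache.commons.vfs2.FileName;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemManager;
import org.apache.commons.vfs2.VFS;

public class FileObjectGetNameDemo {
    public static void main(String[] args) throws Exception {
        FileSystemManager fsManager = VFS.getManager();
        // Placeholder path for illustration only.
        FileObject fileObject = fsManager.resolveFile("file:///tmp/data/report.csv");
        try {
            FileName name = fileObject.getName();           // the abstract file name
            System.out.println(name.getBaseName());         // report.csv
            System.out.println(name.getExtension());        // csv
            System.out.println(name.getPath());             // /tmp/data/report.csv
            System.out.println(name.getURI());              // file:///tmp/data/report.csv
            System.out.println(name.getFriendlyURI());      // URI with any credentials masked
            System.out.println(name.getParent().getURI());  // file:///tmp/data
        } finally {
            fileObject.close();
        }
    }
}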
From source file:org.pentaho.di.bigdata.ShimDependentJobEntryPluginType.java
@Override
public List<PluginFolderInterface> getPluginFolders() {
    return Arrays.<PluginFolderInterface>asList(new PluginFolder(
        new File(ShimDependentJobEntryPluginType.class.getProtectionDomain().getCodeSource()
            .getLocation().getPath()).getParentFile().toURI().toString() + "plugins/", false, true) {

        @Override
        public FileObject[] findJarFiles(final boolean includeLibJars) throws KettleFileException {
            try {
                // Find all the jar files in this folder...
                //
                FileObject folderObject = KettleVFS.getFileObject(this.getFolder());
                FileObject[] fileObjects = folderObject.findFiles(new FileSelector() {
                    @Override
                    public boolean traverseDescendents(FileSelectInfo fileSelectInfo) throws Exception {
                        FileObject fileObject = fileSelectInfo.getFile();
                        String folder = fileObject.getName().getBaseName();
                        return includeLibJars || !"lib".equals(folder);
                    }

                    @Override
                    public boolean includeFile(FileSelectInfo fileSelectInfo) throws Exception {
                        return fileSelectInfo.getFile().toString().endsWith(".jar");
                    }
                });
                return fileObjects;
            } catch (Exception e) {
                throw new KettleFileException(
                    "Unable to list jar files in plugin folder '" + toString() + "'", e);
            }
        }
    });
}
From source file:org.pentaho.di.core.hadoop.HadoopConfigurationBootstrap.java
public synchronized List<HadoopConfigurationInfo> getHadoopConfigurationInfos()
    throws KettleException, ConfigurationException, IOException {
    List<HadoopConfigurationInfo> result = new ArrayList<>();
    FileObject hadoopConfigurationsDir = resolveHadoopConfigurationsDirectory();

    // If the folder doesn't exist, return an empty list
    if (hadoopConfigurationsDir.exists()) {
        String activeId = getActiveConfigurationId();
        String willBeActiveId = getWillBeActiveConfigurationId();
        for (FileObject childFolder : hadoopConfigurationsDir.getChildren()) {
            if (childFolder.getType() == FileType.FOLDER) {
                String id = childFolder.getName().getBaseName();
                FileObject configPropertiesFile = childFolder.getChild(CONFIG_PROPERTIES);
                if (configPropertiesFile.exists()) {
                    Properties properties = new Properties();
                    properties.load(configPropertiesFile.getContent().getInputStream());
                    result.add(new HadoopConfigurationInfo(id, properties.getProperty("name", id),
                        id.equals(activeId), willBeActiveId.equals(id)));
                }
            }
        }
    }
    return result;
}
From source file:org.pentaho.di.core.util.CurrentDirectoryResolver.java
public VariableSpace resolveCurrentDirectory(VariableSpace parentVariables,
    RepositoryDirectoryInterface directory, String filename) {
    Variables tmpSpace = new Variables();
    tmpSpace.setParentVariableSpace(parentVariables);
    tmpSpace.initializeVariablesFrom(parentVariables);

    if (directory != null) {
        tmpSpace.setVariable(Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY, directory.toString());
        tmpSpace.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, directory.toString());
        tmpSpace.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, directory.toString());
    } else if (filename != null) {
        try {
            FileObject fileObject = KettleVFS.getFileObject(filename, tmpSpace);
            if (!fileObject.exists()) {
                // don't set variables if the file doesn't exist
                return tmpSpace;
            }
            FileName fileName = fileObject.getName();

            // The filename of the transformation
            tmpSpace.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, fileName.getBaseName());

            // The directory of the transformation
            FileName fileDir = fileName.getParent();
            tmpSpace.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, fileDir.getURI());
            tmpSpace.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, fileDir.getURI());
            tmpSpace.setVariable(Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY, fileDir.getURI());
        } catch (Exception e) {
            // ignore resolution errors and return the variable space as-is
        }
    }
    return tmpSpace;
}
From source file:org.pentaho.di.job.entries.googledrive.JobEntryGoogleDriveExport.java
protected void checkFolderExists(FileObject folder, boolean createIfNot) throws KettleException {
    if (folder == null) {
        return;
    }
    try {
        if (!folder.exists() && createIfNot) {
            if (log.isDetailed()) {
                log.logDetailed(BaseMessages.getString(PKG, "GoogleDriveExport.Log.CreatingTargetFolder"));
            }
            folder.createFolder();
            return;
        } else {
            if (!folder.exists()) {
                throw new KettleException(
                    BaseMessages.getString(PKG, "GoogleDriveExport.Error.FolderNotExist", folder.getName()));
            } else if (!folder.isFolder()) {
                throw new KettleException(
                    BaseMessages.getString(PKG, "GoogleDriveExport.Error.NotAFolder", folder.getName()));
            }
        }
    } catch (FileSystemException e) {
        throw new KettleException(e);
    }
}
From source file:org.pentaho.di.plugins.examples.step.YamlInputMeta.java
/**
 * Since the exported transformation that runs this will reside in a ZIP file, we can't reference files
 * relatively. So what this does is turn the name of files into absolute paths OR it simply includes the
 * resource in the ZIP file. For now, we'll simply turn it into an absolute path and pray that the file is
 * on a shared drive or something like that.
 *
 * @param space the variable space to use
 * @param definitions
 * @param resourceNamingInterface
 * @param repository the repository to optionally load other resources from (to be converted to XML)
 * @param metaStore the metaStore in which non-kettle metadata could reside.
 *
 * @return the filename of the exported resource
 */
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions,
    ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore)
    throws KettleException {
    try {
        // The object that we're modifying here is a copy of the original!
        // So let's change the filename from relative to absolute by grabbing the file object...
        // In case the name of the file comes from previous steps, forget about this!
        //
        List<String> newFilenames = new ArrayList<String>();

        if (!isInFields()) {
            FileInputList fileList = getFiles(space);
            if (fileList.getFiles().size() > 0) {
                for (FileObject fileObject : fileList.getFiles()) {
                    // From : ${Internal.Transformation.Filename.Directory}/../foo/bar.xml
                    // To   : /home/matt/test/files/foo/bar.xml
                    //
                    // If the file doesn't exist, forget about this effort too!
                    //
                    if (fileObject.exists()) {
                        // Convert to an absolute path and add it to the list.
                        //
                        newFilenames.add(fileObject.getName().getPath());
                    }
                }

                // Still here: set a new list of absolute filenames!
                //
                fileName = newFilenames.toArray(new String[newFilenames.size()]);
                fileMask = new String[newFilenames.size()]; // all null since converted to absolute path.
                fileRequired = new String[newFilenames.size()]; // all null, turn to "Y" :
                for (int i = 0; i < newFilenames.size(); i++) {
                    fileRequired[i] = "Y";
                }
            }
        }
        return null;
    } catch (Exception e) {
        throw new KettleException(e);
    }
}
From source file:org.pentaho.di.plugins.fileopensave.providers.vfs.model.VFSDirectory.java
public static VFSDirectory create(String parent, FileObject fileObject, String connection) {
    VFSDirectory vfsDirectory = new VFSDirectory();
    vfsDirectory.setName(fileObject.getName().getBaseName());
    vfsDirectory.setPath(fileObject.getName().getFriendlyURI());
    vfsDirectory.setParent(parent);
    vfsDirectory.setConnection(connection);
    vfsDirectory.setRoot(VFSFileProvider.NAME);
    vfsDirectory.setCanEdit(true);
    vfsDirectory.setHasChildren(true);
    vfsDirectory.setCanAddChildren(true);
    try {
        vfsDirectory.setDate(new Date(fileObject.getContent().getLastModifiedTime()));
    } catch (FileSystemException e) {
        vfsDirectory.setDate(new Date());
    }
    return vfsDirectory;
}
From source file:org.pentaho.di.plugins.fileopensave.providers.vfs.model.VFSFile.java
public static VFSFile create(String parent, FileObject fileObject, String connection) {
    VFSFile vfsFile = new VFSFile();
    vfsFile.setName(fileObject.getName().getBaseName());
    vfsFile.setPath(fileObject.getName().getFriendlyURI());
    vfsFile.setParent(parent);
    vfsFile.setConnection(connection);
    vfsFile.setRoot(VFSFileProvider.NAME);
    vfsFile.setCanEdit(true);
    try {
        vfsFile.setDate(new Date(fileObject.getContent().getLastModifiedTime()));
    } catch (FileSystemException ignored) {
        vfsFile.setDate(new Date());
    }
    return vfsFile;
}
From source file:org.pentaho.di.plugins.fileopensave.providers.vfs.VFSFileProvider.java
/**
 * @param file
 * @param filters
 * @return
 */
@Override
public List<VFSFile> getFiles(VFSFile file, String filters) {
    if (file.getPath() == null) {
        return getRoot(file);
    }
    List<VFSFile> files = new ArrayList<>();
    try {
        FileObject fileObject = KettleVFS.getFileObject(file.getPath(), new Variables(),
            VFSHelper.getOpts(file.getPath(), file.getConnection()));
        FileType fileType = fileObject.getType();
        if (fileType.hasChildren()) {
            FileObject[] children = fileObject.getChildren();
            for (FileObject child : children) {
                FileType fileType1 = child.getType();
                if (fileType1.hasChildren()) {
                    files.add(VFSDirectory.create(file.getPath(), child, file.getConnection()));
                } else {
                    if (Utils.matches(child.getName().getBaseName(), filters)) {
                        files.add(VFSFile.create(file.getPath(), child, file.getConnection()));
                    }
                }
            }
        }
    } catch (KettleFileException | FileSystemException ignored) {
        // File does not exist
    }
    return files;
}
From source file:org.pentaho.di.repository.KettleDatabaseRepositoryIT.java
protected void verifyJobSamples(RepositoryDirectoryInterface samplesDirectory) throws Exception {
    FileObject jobSamplesFolder = KettleVFS.getFileObject("samples/jobs/");
    FileObject[] files = jobSamplesFolder.findFiles(new FileSelector() {
        @Override
        public boolean traverseDescendents(FileSelectInfo arg0) throws Exception {
            return true;
        }

        @Override
        public boolean includeFile(FileSelectInfo info) throws Exception {
            return info.getFile().getName().getExtension().equalsIgnoreCase("kjb");
        }
    });

    List<FileObject> filesList = Arrays.asList(files);
    Collections.sort(filesList, new Comparator<FileObject>() {
        @Override
        public int compare(FileObject o1, FileObject o2) {
            return o1.getName().getPath().compareTo(o2.getName().getPath());
        }
    });

    // test the storage of jobMeta attributes in the Kettle DB Repo
    if (filesList.size() > 0) {
        FileObject file = filesList.get(0);
        String jobFilename = file.getName().getPath();
        System.out.println("Storing/Loading/validating job attributes");

        // Load the JobMeta object...
        //
        JobMeta jobMeta = new JobMeta(jobFilename, repository);

        // set some attributes
        jobMeta.setAttribute("group", "key", "value");
        jobMeta.setAttribute("test-group", "test-key-1", "test-value");
        jobMeta.setAttribute("test-group", "test-key-2", "test-value");
        jobMeta.setAttribute("test-group", "test-key-3", "test-value-3");

        // Save it in the repository in the samples folder
        //
        jobMeta.setRepositoryDirectory(samplesDirectory);
        repository.save(jobMeta, "unit testing");
        assertNotNull(jobMeta.getObjectId());

        // Load it back up again...
        //
        JobMeta repJobMeta = repository.loadJob(jobMeta.getObjectId(), null);
        String value = repJobMeta.getAttribute("group", "key");
        String value1 = repJobMeta.getAttribute("test-group", "test-key-1");
        String value2 = repJobMeta.getAttribute("test-group", "test-key-2");
        String value3 = repJobMeta.getAttribute("test-group", "test-key-3");
        assertEquals("value", value);
        assertEquals("test-value", value1);
        assertEquals("test-value", value2);
        assertEquals("test-value-3", value3);
    }

    for (FileObject file : filesList) {
        String jobFilename = file.getName().getPath();
        System.out.println("Storing/Loading/validating job '" + jobFilename + "'");

        // Load the JobMeta object...
        //
        JobMeta jobMeta = new JobMeta(jobFilename, repository);
        if (Utils.isEmpty(jobMeta.getName())) {
            jobMeta.setName(Const.createName(file.getName().getBaseName()));
        }

        // Save it in the repository in the samples folder
        //
        jobMeta.setRepositoryDirectory(samplesDirectory);
        repository.save(jobMeta, "unit testing");
        assertNotNull(jobMeta.getObjectId());

        // Load it back up again...
        //
        JobMeta repJobMeta = repository.loadJob(jobMeta.getObjectId(), null);
        String oneXml = repJobMeta.getXML();

        // Save & load it again
        //
        repository.save(jobMeta, "unit testing");
        repJobMeta = repository.loadJob(jobMeta.getObjectId(), null);
        String twoXml = repJobMeta.getXML();

        // The XML needs to be identical after loading
        //
        // storeFile(oneXml, "/tmp/one.ktr");
        // storeFile(twoXml, "/tmp/two.ktr");
        //
        assertEquals(oneXml, twoXml);
    }

    // Verify the number of stored files, see if we can find them all again.
    //
    System.out.println("Stored " + files.length + " job samples in folder " + samplesDirectory.getPath());
    String[] jobNames = repository.getJobNames(samplesDirectory.getObjectId(), false);
    assertEquals(files.length, jobNames.length);
}
From source file:org.pentaho.di.repository.KettleFileRepositoryIT.java
private void verifyJobSamples(RepositoryDirectoryInterface samplesDirectory) throws Exception {
    FileObject jobSamplesFolder = KettleVFS.getFileObject("samples/jobs/");
    FileObject[] files = jobSamplesFolder.findFiles(new FileSelector() {
        @Override
        public boolean traverseDescendents(FileSelectInfo arg0) throws Exception {
            return true;
        }

        @Override
        public boolean includeFile(FileSelectInfo info) throws Exception {
            return info.getFile().getName().getExtension().equalsIgnoreCase("kjb");
        }
    });

    List<FileObject> filesList = Arrays.asList(files);
    Collections.sort(filesList, new Comparator<FileObject>() {
        @Override
        public int compare(FileObject o1, FileObject o2) {
            return o1.getName().getPath().compareTo(o2.getName().getPath());
        }
    });

    for (FileObject file : filesList) {
        String jobFilename = file.getName().getPath();
        System.out.println("Storing/Loading/validating job '" + jobFilename + "'");

        // Load the JobMeta object...
        //
        JobMeta jobMeta = new JobMeta(jobFilename, repository);
        jobMeta.setFilename(null);

        // The name is sometimes empty in the file, duplicates are present too...
        // Replaces slashes and the like as well...
        //
        jobMeta.setName(Const.createName(file.getName().getBaseName()));
        jobMeta.setName(jobMeta.getName().replace('/', '-'));

        if (Utils.isEmpty(jobMeta.getName())) {
            jobMeta.setName(Const.createName(file.getName().getBaseName()));
        }
        if (jobMeta.getName().contains("/")) {
            jobMeta.setName(jobMeta.getName().replace('/', '-'));
        }

        // Save it in the repository in the samples folder
        //
        jobMeta.setRepositoryDirectory(samplesDirectory);
        repository.save(jobMeta, "unit testing");
        assertNotNull(jobMeta.getObjectId());

        // Load it back up again...
        //
        JobMeta repJobMeta = repository.loadJob(jobMeta.getObjectId(), null);
        String oneXml = repJobMeta.getXML();

        // Save & load it again
        //
        repository.save(jobMeta, "unit testing");
        repJobMeta = repository.loadJob(jobMeta.getObjectId(), null);
        String twoXml = repJobMeta.getXML();

        // The XML needs to be identical after loading
        //
        // storeFile(oneXml, "/tmp/one.ktr");
        // storeFile(twoXml, "/tmp/two.ktr");
        //
        assertEquals(oneXml, twoXml);
    }

    // Verify the number of stored files, see if we can find them all again.
    //
    System.out.println("Stored " + files.length + " job samples in folder " + samplesDirectory.getPath());
    String[] jobNames = repository.getJobNames(samplesDirectory.getObjectId(), false);
    assertEquals(files.length, jobNames.length);
}