List of usage examples for org.apache.commons.io.FileUtils.listFiles
public static Collection<File> listFiles(File directory, String[] extensions, boolean recursive)
public static Collection<File> listFiles(File directory, IOFileFilter fileFilter, IOFileFilter dirFilter)
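Before the project examples, here is a minimal, self-contained sketch of both overloads in isolation. The directory path ("/tmp/data") and the ".orm.xml" suffix are placeholders chosen for illustration, not taken from any of the projects below.

import java.io.File;
import java.util.Collection;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.FileFilterUtils;
import org.apache.commons.io.filefilter.TrueFileFilter;

public class ListFilesDemo {

    public static void main(String[] args) {
        // Hypothetical directory; replace with a real path.
        File dir = new File("/tmp/data");

        // Overload 1: filter by extension (no leading dot), recursing into subdirectories.
        Collection<File> xmlFiles = FileUtils.listFiles(dir, new String[] { "xml", "wadl" }, true);

        // Overload 2: one IOFileFilter for files and one for directories;
        // TrueFileFilter.INSTANCE matches everything, so this lists every file recursively.
        Collection<File> allFiles = FileUtils.listFiles(dir, TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE);

        // Overload 2 with a suffix filter on files only; a null directory filter
        // means subdirectories are not searched.
        Collection<File> ormFiles = FileUtils.listFiles(dir, FileFilterUtils.suffixFileFilter(".orm.xml"), null);

        System.out.println(xmlFiles.size() + " / " + allFiles.size() + " / " + ormFiles.size());
    }
}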
From source file:com.indoqa.maven.wadldoc.AbstractWadlDocumentationMojo.java
protected void executeReport(Locale unusedLocale) throws MavenReportException {
    if (this.skip) {
        this.getLog().info("Skipping javadoc generation");
        return;
    }

    // make sure that the output directory exists
    this.outputDirectory.mkdirs();

    // fix for some Maven classloading problems in conjunction with JAXP that
    // relies on a set thread context classloader ...
    Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());

    this.getLog().debug("Reading WADL files from: " + this.inputDirectory);
    @SuppressWarnings("unchecked")
    Collection<File> wadlFiles = FileUtils.listFiles(this.inputDirectory, new String[] { "xml", "wadl" }, false);
    if (wadlFiles.size() <= 0) {
        this.getLog().info("No WADL files. Nothing to do.");
    }

    // WADL to HTML transformation
    this.transformWadlDocuments2HTMLDocuments(wadlFiles);

    // copy stylesheet
    this.copyStylesheet();

    // index page
    HtmlDocument startPage = this.writeIndexPage(wadlFiles);

    // frameset
    this.writeFrameset(wadlFiles, startPage);
}
From source file:com.stevpet.sonar.plugins.dotnet.mscover.vstest.clean.CleanTest.java
private void expectFilesInDir(File testDir, int count) {
    Collection<File> files = FileUtils.listFiles(testDir, TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE);
    assertEquals(count, files.size());
}
From source file:averroes.JarOrganizer.java
/**
 * Process the JRE archives (recognized JAR files are: rt.jar, jsse.jar, jce.jar).
 *
 * @param dir
 */
private void processJreArchives(String dir) {
    File directory = new File(dir);
    org.apache.commons.io.filefilter.IOFileFilter nameFilter = FileFilterUtils.or(
            FileFilterUtils.nameFileFilter("rt.jar"),
            FileFilterUtils.nameFileFilter("jsse.jar"),
            FileFilterUtils.nameFileFilter("jce.jar"));

    FileUtils.listFiles(directory, nameFilter, FileFilterUtils.trueFileFilter())
            .forEach(file -> processArchive(file.getPath(), false));
}
From source file:com.marklogic.entityservices.tests.TestSetup.java
@SuppressWarnings("unchecked")
Collection<File> getTestResources(String dirName) {
    URL filesUrl = _client.getClass().getResource(dirName);
    return FileUtils.listFiles(new File(filesUrl.getPath()), FileFilterUtils.trueFileFilter(),
            FileFilterUtils.trueFileFilter());
}
From source file:com.aionemu.commons.scripting.impl.ScriptContextImpl.java
/**
 * {@inheritDoc}
 */
@Override
public synchronized void init() {
    if (compilationResult != null) {
        log.error(new Exception("Init request on initialized ScriptContext"));
        return;
    }

    ScriptCompiler scriptCompiler = instantiateCompiler();
    @SuppressWarnings("unchecked")
    Collection<File> files = FileUtils.listFiles(root, scriptCompiler.getSupportedFileTypes(), true);

    if (parentScriptContext != null) {
        scriptCompiler.setParentClassLoader(parentScriptContext.getCompilationResult().getClassLoader());
    }

    scriptCompiler.setLibraires(libraries);
    compilationResult = scriptCompiler.compile(files);

    getClassListener().postLoad(compilationResult.getCompiledClasses());

    if (childScriptContexts != null) {
        for (ScriptContext context : childScriptContexts) {
            context.init();
        }
    }
}
From source file:fr.paris.lutece.util.jpa.JPAPersistenceUnitPostProcessor.java
/**
 * Search for <code>WEB-INF/conf/plugins/*.orm.xml</code>.
 *
 * @return list of files found
 */
private Collection<File> getListORMFiles() {
    String strConfPath = AppPathService.getAbsolutePathFromRelativePath(PATH_CONF);
    File dirConfPlugins = new File(strConfPath);

    return FileUtils.listFiles(dirConfPlugins, FileFilterUtils.suffixFileFilter(SUFFIX_ORM_XML),
            TrueFileFilter.INSTANCE);
}
From source file:com.example.mydtapp.JdbcInputAppTest.java
@Test
public void testApplication() throws Exception {
    try {
        LocalMode lma = LocalMode.newInstance();
        Configuration conf = new Configuration(false);
        conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties-SimpleJdbcToHDFSApp.xml"));
        lma.prepareDAG(new JdbcHDFSApp(), conf);
        LocalMode.Controller lc = lma.getController();
        lc.runAsync();

        // wait for output files to roll
        Thread.sleep(5000);

        String[] extensions = { "dat.0", "tmp" };
        Collection<File> list = FileUtils.listFiles(new File(FILE_NAME), extensions, false);
        Assert.assertEquals("Records in file", 10, FileUtils.readLines(list.iterator().next()).size());
    } catch (ConstraintViolationException e) {
        Assert.fail("constraint violations: " + e.getConstraintViolations());
    }
}
From source file:com.silverpeas.util.ZipManager.java
/**
 * Compresses a folder recursively into zip format.
 *
 * @param folderToZip - the folder to compress
 * @param zipFile - the zip file to create
 * @return the size of the generated zip file in bytes
 * @throws FileNotFoundException
 * @throws IOException
 */
public static long compressPathToZip(File folderToZip, File zipFile) throws IOException {
    ZipArchiveOutputStream zos = null;
    try {
        // create the zip output stream
        zos = new ZipArchiveOutputStream(new FileOutputStream(zipFile));
        zos.setFallbackToUTF8(true);
        zos.setCreateUnicodeExtraFields(NOT_ENCODEABLE);
        zos.setEncoding(CharEncoding.UTF_8);
        Collection<File> folderContent = FileUtils.listFiles(folderToZip, null, true);
        for (File file : folderContent) {
            String entryName = file.getPath().substring(folderToZip.getParent().length() + 1);
            entryName = FilenameUtils.separatorsToUnix(entryName);
            zos.putArchiveEntry(new ZipArchiveEntry(entryName));
            InputStream in = new FileInputStream(file);
            IOUtils.copy(in, zos);
            zos.closeArchiveEntry();
            IOUtils.closeQuietly(in);
        }
    } finally {
        if (zos != null) {
            IOUtils.closeQuietly(zos);
        }
    }
    return zipFile.length();
}
From source file:com.ning.metrics.collector.hadoop.processing.TestHadoopWriterFactory.java
private void testProcessLeftBelowFilesTooSoon() throws Exception {
    final HadoopWriterFactory factory = new NoWriteHadoopWriterFactory(null, config);
    FileUtils.touch(new File(lockDirectory.getPath() + "/some_file_which_should_be_sent_1"));
    FileUtils.touch(new File(lockDirectory.getPath() + "/some_file_which_should_be_sent_2"));
    FileUtils.touch(new File(quarantineDirectory.getPath() + "/some_other_file_which_should_be_sent"));
    Assert.assertEquals(FileUtils
            .listFiles(spoolDirectory, FileFilterUtils.trueFileFilter(), FileFilterUtils.trueFileFilter())
            .size(), 3);
    Assert.assertTrue(spoolDirectory.exists());
    Assert.assertTrue(tmpDirectory.exists());
    Assert.assertTrue(lockDirectory.exists());
    Assert.assertTrue(quarantineDirectory.exists());

    // No sleep!
    factory.processLeftBelowFiles();

    // No file should have been sent
    Assert.assertEquals(FileUtils
            .listFiles(spoolDirectory, FileFilterUtils.trueFileFilter(), FileFilterUtils.trueFileFilter())
            .size(), 3);
    Assert.assertTrue(spoolDirectory.exists());
    Assert.assertTrue(tmpDirectory.exists());
    Assert.assertTrue(lockDirectory.exists());
    Assert.assertTrue(quarantineDirectory.exists());

    // We could even test the mapping in HDFS here (with the keys)
    Assert.assertEquals(hdfs.values().size(), 0);
}
From source file:com.ning.arecibo.collector.process.TestFileBackedBuffer.java
@Test(groups = "slow") public void testAppend() throws Exception { final List<Event> eventsSent = new ArrayList<Event>(); // Sanity check before the tests Assert.assertEquals(timelineEventHandler.getBackingBuffer().getFilesCreated(), 0); Assert.assertEquals(FileUtils.listFiles(basePath, new String[] { "bin" }, false).size(), 0); // Send enough events to spill over to disk final DateTime startTime = new DateTime(DateTimeZone.UTC); for (int i = 0; i < NB_EVENTS; i++) { final Event event = new MapEvent(startTime.plusSeconds(i).getMillis(), "NOT_USED", HOST_UUID, EVENT); processor.processEvent(event);// w w w . j ava 2 s . com eventsSent.add(event); } // Check the files have been created (at least one per accumulator) final long bytesOnDisk = timelineEventHandler.getBackingBuffer().getBytesOnDisk(); Assert.assertTrue(timelineEventHandler.getBackingBuffer().getFilesCreated() > 0); final Collection<File> writtenFiles = FileUtils.listFiles(basePath, new String[] { "bin" }, false); Assert.assertTrue(writtenFiles.size() > 0); log.info("Sent {} events and wrote {} bytes on disk ({} bytes/event)", new Object[] { NB_EVENTS, bytesOnDisk, bytesOnDisk / NB_EVENTS }); // Replay the events. Note that eventsSent != eventsReplayed as some of the ones sent are still in memory final Replayer replayer = new Replayer(basePath.getAbsolutePath()); final List<HostSamplesForTimestamp> eventsReplayed = replayer.readAll(); for (int i = 0; i < eventsReplayed.size(); i++) { Assert.assertEquals(eventsReplayed.get(i).getTimestamp().getMillis(), eventsSent.get(i).getTimestamp()); Assert.assertEquals(eventsReplayed.get(i).getCategory(), eventsSent.get(i).getEventType()); } // Make sure files have been deleted Assert.assertEquals(FileUtils.listFiles(basePath, new String[] { "bin" }, false).size(), 0); }