List of usage examples for java.io.FilenameFilter
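FilenameFilter declares a single method, boolean accept(File dir, String name), so since Java 8 it can be implemented with a lambda instead of the anonymous classes used in most of the examples below. A minimal sketch, assuming a hypothetical logs/ directory:

import java.io.File;

public class FilenameFilterLambdaSketch {
    public static void main(String[] args) {
        File dir = new File("logs");
        // list() passes each entry name to the filter and returns only the accepted names;
        // it returns null if "logs" does not exist or is not a directory.
        String[] logNames = dir.list((parent, name) -> name.endsWith(".log"));
        if (logNames != null) {
            for (String name : logNames) {
                System.out.println(name);
            }
        }
    }
}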
From source file:br.edimarmanica.trinity.extract.Extract.java
private void execute(int offset) throws IOException, REException {
    int start = ((WINDOW_SIZE - NR_SHARED_PAGES) * offset) + NR_SHARED_PAGES;
    int end = start + WINDOW_SIZE - NR_SHARED_PAGES;
    if (General.DEBUG) {
        System.out.println("Starting extracting");
    }
    File dir = new File(Paths.PATH_BASE + site.getPath());
    int i = 0;
    for (File page : dir.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return name.endsWith(".html") || name.endsWith(".htm");
        }
    })) {
        if (i < NR_SHARED_PAGES || (i >= start && i < end)) {
            if (General.DEBUG) {
                System.out.println("\t Page (" + i + "): " + page.getName());
            }
            execute(page, offset);
        }
        i++;
    }
    if (General.DEBUG) {
        System.out.println("Ending extracting");
    }
}
From source file:interactivespaces.configuration.FileSystemConfigurationStorageManager.java
/**
 * Get all configuration files from the Interactive Spaces configuration folder.
 *
 * @return all files in the configuration folder
 */
private File[] getConfigFiles() {
    File configurationFolder =
            new File(configFolder, ContainerFilesystemLayout.FOLDER_CONFIG_INTERACTIVESPACES);
    if (configurationFolder.exists()) {
        if (!configurationFolder.isDirectory()) {
            throw new SimpleInteractiveSpacesException(String.format(
                    "Interactive Spaces configuration folder %s is not a directory", configurationFolder));
        }
    } else {
        throw new SimpleInteractiveSpacesException(String.format(
                "Interactive Spaces configuration folder %s does not exist", configurationFolder));
    }

    File[] configFiles = configurationFolder.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            String filename = name.toLowerCase();
            return filename.endsWith(CONFIGURATION_FILE_EXTENSION);
        }
    });

    if (configFiles.length == 0) {
        throw new SimpleInteractiveSpacesException(String.format(
                "Interactive Spaces configuration folder %s contains no files ending with %s",
                configurationFolder, CONFIGURATION_FILE_EXTENSION));
    }

    return configFiles;
}
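Note that listFiles() in the snippet above can also return null (for example when an I/O error occurs while reading the directory), in which case the length check would throw a NullPointerException. A small defensive sketch using only JDK types; listOrFail is a hypothetical helper, not part of the Interactive Spaces source:

import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;

final class SafeListing {
    // listFiles() returns null when the path cannot be read as a directory;
    // this hypothetical helper turns that into an explicit exception.
    static File[] listOrFail(File folder, FilenameFilter filter) throws IOException {
        File[] files = folder.listFiles(filter);
        if (files == null) {
            throw new IOException("Could not list directory " + folder);
        }
        return files;
    }
}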
From source file:com.nullwire.trace.ExceptionHandler.java
/**
 * Search for logfiles.
 *
 * @return the names of all ".log" files found
 */
private static String[] searchForLogs() {
    File dir = new File(G.FILES_PATH + "/");
    // Try to create the files folder if it doesn't exist
    dir.mkdir();
    // Filter for ".log" files
    FilenameFilter filter = new FilenameFilter() {
        public boolean accept(File dir, String name) {
            return name.endsWith(".log");
        }
    };
    return dir.list(filter);
}
From source file:org.openspaces.usm.USMRollingFileAppenderTailerTest.java
private void assertFileRolling() {
    File folder = new File(logsDirectory);
    // Get list of files according to regex.
    File[] files = folder.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return java.util.regex.Pattern.matches("my.*\\.log.*", name);
        }
    });
    // Check file rolling occurred.
    Assert.assertTrue(files.length == EXPECTED_NUMBER_OF_FILE_PARTS);
    // Test file sizes
    for (File file : files) {
        long fileSize = file.length();
        double fileSizeInKB = (double) fileSize / LOG_IO_BUFFER_SIZE_BYTES;
        // Assert no file is over the size limit defined in the RollingFileAppender.
        Assert.assertTrue(fileSizeInKB < MAX_LOG_FILE_SIZE_DOUBLE + 1);
    }
}
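Pattern.matches(regex, input) compiles the regular expression on every call; when a regex-based FilenameFilter runs over many directory entries, compiling the pattern once is the usual refinement. A sketch reusing the same "my.*\.log.*" expression from the test above (RolledLogFilters is a hypothetical class name):

import java.io.FilenameFilter;
import java.util.regex.Pattern;

final class RolledLogFilters {
    // Compile the regex once instead of on every accept() call.
    private static final Pattern ROLLED_LOG = Pattern.compile("my.*\\.log.*");

    static FilenameFilter rolledLogs() {
        return (dir, name) -> ROLLED_LOG.matcher(name).matches();
    }
}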
From source file:com.linkedin.thirdeye.hadoop.backfill.BackfillControllerAPIs.java
/**
 * Downloads a segment from the controller, given the table name and segment name
 *
 * @param segmentName
 * @param hdfsSegmentPath
 * @throws IOException
 * @throws ArchiveException
 */
public void downloadSegment(String segmentName, Path hdfsSegmentPath) throws IOException, ArchiveException {
    FileSystem fs = FileSystem.get(new Configuration());
    HttpClient controllerClient = new DefaultHttpClient();
    HttpGet req = new HttpGet(SEGMENTS_ENDPOINT + URLEncoder.encode(tableName, UTF_8) + "/"
            + URLEncoder.encode(segmentName, UTF_8));
    HttpResponse res = controllerClient.execute(controllerHttpHost, req);
    try {
        if (res.getStatusLine().getStatusCode() != 200) {
            throw new IllegalStateException(res.getStatusLine().toString());
        }
        LOGGER.info("Fetching segment {}", segmentName);
        InputStream content = res.getEntity().getContent();

        File tempDir = new File(Files.createTempDir(), "thirdeye_temp");
        tempDir.mkdir();
        LOGGER.info("Creating temporary dir for staging segments {}", tempDir);
        File tempSegmentDir = new File(tempDir, segmentName);
        File tempSegmentTar = new File(tempDir, segmentName + ThirdEyeConstants.TAR_SUFFIX);

        LOGGER.info("Downloading {} to {}", segmentName, tempSegmentTar);
        OutputStream out = new FileOutputStream(tempSegmentTar);
        IOUtils.copy(content, out);
        if (!tempSegmentTar.exists()) {
            throw new IllegalStateException("Download of " + segmentName + " unsuccessful");
        }

        LOGGER.info("Extracting segment {} to {}", tempSegmentTar, tempDir);
        TarGzCompressionUtils.unTar(tempSegmentTar, tempDir);
        File[] files = tempDir.listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                return !name.endsWith(ThirdEyeConstants.TAR_SUFFIX) && new File(dir, name).isDirectory();
            }
        });
        if (files.length == 0) {
            throw new IllegalStateException("Failed to extract " + tempSegmentTar + " to " + tempDir);
        } else if (!files[0].getName().equals(tempSegmentDir.getName())) {
            LOGGER.info("Moving extracted segment to the segment dir {}", tempSegmentDir);
            FileUtils.moveDirectory(files[0], tempSegmentDir);
        }
        if (!tempSegmentDir.exists()) {
            throw new IllegalStateException("Failed to move " + files[0] + " to " + tempSegmentDir);
        }

        LOGGER.info("Copying segment from {} to hdfs {}", tempSegmentDir, hdfsSegmentPath);
        fs.copyFromLocalFile(new Path(tempSegmentDir.toString()), hdfsSegmentPath);
        Path hdfsSegmentDir = new Path(hdfsSegmentPath, segmentName);
        if (!fs.exists(hdfsSegmentDir)) {
            throw new IllegalStateException("Failed to copy segment " + segmentName + " from local path "
                    + tempSegmentDir + " to hdfs path " + hdfsSegmentPath);
        }
    } finally {
        if (res.getEntity() != null) {
            EntityUtils.consume(res.getEntity());
        }
    }
    LOGGER.info("Successfully downloaded segment {} to {}", segmentName, hdfsSegmentPath);
}
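One detail worth noting in the download step above is that the FileOutputStream passed to IOUtils.copy is never closed. A minimal sketch of the same copy with try-with-resources (copyToFile is a hypothetical helper; IOUtils is the same commons-io class used above):

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.commons.io.IOUtils;

final class DownloadCopySketch {
    // Close the output stream even if the copy fails part-way through.
    static void copyToFile(InputStream content, File target) throws IOException {
        try (OutputStream out = new FileOutputStream(target)) {
            IOUtils.copy(content, out);
        }
    }
}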
From source file:com.bahmanm.karun.PackageCollection.java
/**
 * Extracts all .db archives in temp directory.
 *
 * @throws IOException
 * @throws ArchiveException
 */
private void extractAllDbArchives() throws IOException, ArchiveException {
    File dbDir = new File(dbPathSystem + "/sync/");
    File[] dbFiles = dbDir.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return name.endsWith(".db");
        }
    });
    for (int i = 0; i < dbFiles.length; i++) {
        String fileName = dbFiles[i].getName();
        // Strip the ".db" suffix to form the extraction directory name.
        File dir = new File(
                dbPathTempSync.getAbsolutePath() + "/" + fileName.substring(0, fileName.length() - 3));
        // Note: returns from the whole method (rather than skipping this archive)
        // as soon as one extraction directory already exists.
        if (dir.exists())
            return;
        dir.mkdir();
        Utils.extractTarGz(dbFiles[i].getAbsolutePath(), dir);
    }
}
From source file:Main.StaticTools.java
private static void removeFiles(File file) {
    // Note: the lambda's dir parameter is the parent directory, so this filter
    // accepts every entry; the isDirectory() check below does the real filtering.
    File[] directories = file.listFiles((File dir, String name) -> dir.isDirectory());
    for (File dir1 : directories) {
        if (dir1.isDirectory()) {
            File[] content = dir1.listFiles(new FilenameFilter() {
                public boolean accept(File dir, String name) {
                    name = name.toLowerCase();
                    return name.endsWith("__highres.jpg");
                }
            });
            if (content.length > 0) {
                for (int i = 0; i < content.length; i++) {
                    String absolutePath = content[i].getAbsolutePath();
                    try {
                        // 13 = length of "__highres.jpg"; delete the matching plain ".jpg" file.
                        Files.deleteIfExists(
                                Paths.get(absolutePath.substring(0, absolutePath.length() - 13) + ".jpg"));
                    } catch (IOException e) {
                        errorOut(content[i].getName(), e);
                    }
                }
            }
        }
    }
}
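The same "__highres.jpg" match can be expressed without FilenameFilter by using an NIO directory stream with a glob pattern. A sketch, assuming a hypothetical photos/ directory (the glob is case-sensitive, whereas the filter above lower-cases the name first):

import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

final class HighresListingSketch {
    // List entries whose names end with "__highres.jpg" using a glob.
    static void printHighresFiles() throws IOException {
        Path dir = Paths.get("photos");
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, "*__highres.jpg")) {
            for (Path entry : stream) {
                System.out.println(entry.getFileName());
            }
        }
    }
}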
From source file:de.kaiserpfalzEdv.maven.apacheds.config.LdifLoader.java
private void loadLdifsInDirectory(final File directory)
        throws FileNotFoundException, LdapException, MojoExecutionException {
    logger.debug("Loading LDIFs from session: " + directory);

    for (File ldif : directory.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(final File dir, final String name) {
            return name.endsWith(ldifFileExtension);
        }
    })) {
        loadLdif(ldif);
    }
}
From source file:com.freemedforms.openreact.db.DbSchema.java
public static void dbPatcher(String patchLocation) {
    log.info("Database patching started for " + patchLocation);
    File patchDirectoryObject = new File(patchLocation);
    String[] children = patchDirectoryObject.list(new FilenameFilter() {
        @Override
        public boolean accept(File file, String name) {
            log.debug("file = " + file + ", name = " + name);
            if (name.startsWith(".")) {
                log.debug("Skipping " + name + " (dot file)");
                return false;
            }
            if (!name.endsWith(".sql")) {
                log.debug("Skipping " + name + " (doesn't end with .sql)");
                return false;
            }
            return true;
        }
    });
    if (children != null) {
        // Sort all patches into name order.
        Arrays.sort(children);
        // Process patches
        log.info("Found " + children.length + " patches to process");
        for (String patchFilename : children) {
            String patchName = FilenameUtils.getBaseName(patchFilename);
            if (DbSchema.isPatchApplied(patchName)) {
                log.info("Patch " + patchName + " already applied.");
                continue;
            } else {
                log.info("Applying patch " + patchName + ", source file = " + patchFilename);
                boolean success;
                try {
                    success = DbSchema.applyPatch(
                            patchDirectoryObject.getAbsolutePath() + File.separatorChar + patchFilename);
                } catch (SQLException e) {
                    log.error(e);
                    success = false;
                }
                if (success) {
                    DbSchema.recordPatch(patchName);
                } else {
                    log.error("Failed to apply " + patchName + ", stopping patch sequence.");
                    return;
                }
            }
        }
    }
    log.info("Database patching completed");
}
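Arrays.sort on the returned names gives plain lexicographic order, so "patch10.sql" sorts before "patch2.sql" unless the patch numbers are zero-padded. A lambda form of the same dot-file/.sql filter plus the sort, as a sketch (listPatches is a hypothetical helper, not part of the DbSchema source):

import java.io.File;
import java.util.Arrays;

final class PatchListingSketch {
    // Same accept logic as above: skip dot files, keep only *.sql, then sort by name.
    static String[] listPatches(File patchDir) {
        String[] names = patchDir.list((dir, name) -> !name.startsWith(".") && name.endsWith(".sql"));
        if (names != null) {
            Arrays.sort(names);
        }
        return names;
    }
}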