List of usage examples for java.nio.file.Files.newDirectoryStream
public static DirectoryStream<Path> newDirectoryStream(Path dir) throws IOException
From source file:org.rascalmpl.library.experiments.Compiler.RVM.Interpreter.help.HelpManager.java
/**
 * Opens a Lucene {@link IndexReader} for every course index directory found
 * under {@code coursesDir}.
 *
 * <p>Only immediate subdirectories whose names start with an uppercase letter
 * are treated as course indexes. Indexes that fail to open are skipped with a
 * message on {@code stderr} rather than aborting the whole scan.
 *
 * @return readers for all course indexes that opened successfully
 * @throws IOException if {@code coursesDir} is not a {@code file:} URI, or if
 *         the directory itself cannot be listed
 */
private ArrayList<IndexReader> getReaders() throws IOException {
    if (coursesDir.getScheme().equals("file")) {
        Path destDir = Paths.get(coursesDir);
        ArrayList<IndexReader> readers = new ArrayList<>();
        // try-with-resources: the original iterated the DirectoryStream without
        // closing it, leaking an OS directory handle on every call.
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(destDir)) {
            for (Path p : stream) {
                // Course indexes live in capitalized subdirectories.
                if (Files.isDirectory(p) && p.getFileName().toString().matches("^[A-Z].*")) {
                    Directory directory = FSDirectory.open(p);
                    try {
                        DirectoryReader ireader = DirectoryReader.open(directory);
                        readers.add(ireader);
                    } catch (IOException e) {
                        // Best effort: a corrupt/unreadable index must not hide the others.
                        stderr.println("Skipping index " + directory);
                    }
                }
            }
        }
        return readers;
    }
    throw new IOException("Cannot yet handle non-file coursesDir");
}
From source file:au.org.ands.vocabs.toolkit.provider.harvest.FileHarvestProvider.java
/** Do a harvest. Update the message parameter with the result * of the harvest./* www. j a va 2 s . co m*/ * NB: if delete is true, Tomcat must have write access, in order * to be able to delete the file successfully. However, a failed * deletion will not per se cause the subtask to fail. * @param version The version to which access points are to be added. * @param format The format of the file(s) to be harvested. * @param filePath The path to the file or directory to be harvested. * @param outputPath The directory in which to store output files. * @param delete True, if successfully harvested files are to be deleted. * @param results HashMap representing the result of the harvest. * @return True, iff the harvest succeeded. */ public final boolean getHarvestFiles(final Version version, final String format, final String filePath, final String outputPath, final boolean delete, final HashMap<String, String> results) { ToolkitFileUtils.requireDirectory(outputPath); Path filePathPath = Paths.get(filePath); Path outputPathPath = Paths.get(outputPath); if (Files.isDirectory(filePathPath)) { logger.debug("Harvesting file(s) from directory " + filePath); try (DirectoryStream<Path> stream = Files.newDirectoryStream(filePathPath)) { for (Path entry : stream) { // Only harvest files. E.g., no recursive // directory searching. 
if (Files.isRegularFile(entry)) { logger.debug("Harvesting file:" + entry.toString()); Path target = outputPathPath.resolve(entry.getFileName()); Files.copy(entry, target, StandardCopyOption.REPLACE_EXISTING); AccessPointUtils.createFileAccessPoint(version, format, target); if (delete) { logger.debug("Deleting file: " + entry.toString()); try { Files.delete(entry); } catch (AccessDeniedException e) { logger.error("Unable to delete file: " + entry.toString(), e); } } } } } catch (DirectoryIteratorException | IOException ex) { results.put(TaskStatus.EXCEPTION, "Exception in getHarvestFiles while copying file"); logger.error("Exception in getHarvestFiles while copying file:", ex); return false; } } else { logger.debug("Harvesting file: " + filePath); try { Path target = outputPathPath.resolve(filePathPath.getFileName()); Files.copy(filePathPath, target, StandardCopyOption.REPLACE_EXISTING); AccessPointUtils.createFileAccessPoint(version, format, target); if (delete) { logger.debug("Deleting file: " + filePathPath.toString()); try { Files.delete(filePathPath); } catch (AccessDeniedException e) { logger.error("Unable to delete file: " + filePathPath.toString(), e); } } } catch (IOException e) { results.put(TaskStatus.EXCEPTION, "Exception in getHarvestFiles while copying file"); logger.error("Exception in getHarvestFiles while copying file:", e); return false; } } // If we reached here, success, so return true. return true; }
From source file:com.vaushell.superpipes.nodes.buffer.N_Buffer.java
@Override protected void prepareImpl() throws Exception { // Load messages IDs messagesPath = getDispatcher().getDatas().resolve(getNodeID()); Files.createDirectories(messagesPath); try (final DirectoryStream<Path> stream = Files.newDirectoryStream(messagesPath)) { for (final Path p : stream) { final long ID = Long.parseLong(p.getFileName().toString()); messageIDs.add(ID);//from w ww . j a v a 2 s .co m } } }
From source file:org.onehippo.cms7.essentials.dashboard.utils.GlobalUtils.java
/**
 * Recursively collects {@code startPath} and every directory below it into
 * {@code existing} (pre-order: a directory appears before its children).
 *
 * <p>NOTE(review): there is no cycle protection, so a symlinked directory
 * loop would recurse forever — presumably callers pass plain trees; verify.
 *
 * @param startPath root directory to walk (added to the list itself)
 * @param existing  accumulator the discovered directories are appended to
 */
public static void populateDirectories(final Path startPath, final List<Path> existing) {
    existing.add(startPath);
    try (final DirectoryStream<Path> stream = Files.newDirectoryStream(startPath)) {
        for (Path path : stream) {
            if (Files.isDirectory(path)) {
                populateDirectories(path, existing);
            }
        }
    } catch (IOException e) {
        // The original logged an empty message, losing the failing path.
        log.error("Cannot list directory: " + startPath, e);
    }
}
From source file:org.tallison.cc.warc.AbstractExtractor.java
/**
 * Recursively walks {@code directory}, handing every file ending in
 * {@code .warc} or {@code .warc.gz} to {@link #processWarc}.
 *
 * @param directory directory to scan
 * @throws IOException if a directory cannot be listed
 */
private void processDirectory(Path directory) throws IOException {
    // try-with-resources: the original never closed the DirectoryStream,
    // leaking one directory handle per recursion level.
    try (DirectoryStream<Path> ds = Files.newDirectoryStream(directory)) {
        for (Path p : ds) {
            System.err.println("processing warc1: " + p);
            if (Files.isDirectory(p)) {
                processDirectory(p);
            } else if (p.getFileName().toString().endsWith(".warc.gz")
                    || p.getFileName().toString().endsWith(".warc")) {
                processWarc(p);
            }
        }
    }
}
From source file:company.gonapps.loghut.utils.FileUtils.java
public static void rmdir(Path directoryPath, DirectoryStream.Filter<Path> ignoringFilter) throws NotDirectoryException, IOException { if (!directoryPath.toFile().isDirectory()) throw new NotDirectoryException(directoryPath.toString()); try (DirectoryStream<Path> directoryStream = Files.newDirectoryStream(directoryPath)) { List<Path> ignoredPaths = new LinkedList<>(); for (Path path : directoryStream) { if (!ignoringFilter.accept(path)) return; ignoredPaths.add(path);//from w ww .j a v a 2 s . com } for (Path ignoredPath : ignoredPaths) { Files.delete(ignoredPath); } Files.delete(directoryPath); } }
From source file:org.apache.storm.daemon.logviewer.utils.DirectoryCleaner.java
/** * Creates DirectoryStream for give directory. * * @param dir File instance representing specific directory * @return DirectoryStream/*from w w w . j a v a2 s. c om*/ */ public DirectoryStream<Path> getStreamForDirectory(Path dir) throws IOException { try { return Files.newDirectoryStream(dir); } catch (IOException e) { numFileOpenExceptions.mark(); throw e; } }
From source file:com.basistech.yca.FlatteningConfigFileManager.java
private void initialInventory() { // The watcher only detects _changes_ try (DirectoryStream<Path> directoryStream = Files.newDirectoryStream(configurationDirectory)) { for (Path path : directoryStream) { processAddOrUpdate(path);//from w ww .j av a 2 s . c om } } catch (IOException ex) { LOG.error("Error listing initial directory contents for " + configurationDirectory, ex); } }
From source file:car_counter.processing.DefaultProcessor.java
public void processIncomingContents() { try (DirectoryStream<Path> stream = Files.newDirectoryStream(incomingDirectory)) { for (Path sourceFile : stream) { try { if (!Files.isRegularFile(sourceFile)) { logger.debug(String.format("Skipping %s - not regular file", sourceFile)); continue; }//from w ww . j a v a 2 s . c om DateTime dateTime = dateTimeExtractor.getDateTime(sourceFile); Path destinationFile = dataDirectory.resolve(UUID.randomUUID().toString()); Files.move(sourceFile, destinationFile); Collection<DetectedVehicle> detectedVehicles = carCounter.processVideo(destinationFile, dateTime); storage.store(destinationFile, detectedVehicles); logger.info(String.format("Processed '%s'@%s found %d vehicles", sourceFile, dateTime, detectedVehicles.size())); } catch (Exception e) { logger.error("Error processing file: " + sourceFile, e); } } } catch (IOException e) { logger.error("Error listing incoming directory", e); } }
From source file:org.bimserver.tools.ifcloader.BulkLoader.java
private void start() { Path basePath = Paths.get("C:\\Bulk"); Path bulkPath = basePath.resolve("bulk"); Path regularPath = basePath.resolve("single"); try (JsonBimServerClientFactory factory = new JsonBimServerClientFactory("http://localhost:8080")) { ExecutorService executorService = new ThreadPoolExecutor(16, 16, 1, TimeUnit.HOURS, new ArrayBlockingQueue<>(10000)); try (BimServerClient client = factory .create(new UsernamePasswordAuthenticationInfo("admin@bimserver.org", "admin"))) { if (Files.exists(bulkPath)) { DirectoryStream<Path> stream = Files.newDirectoryStream(bulkPath); for (Path path : stream) { executorService.submit(new Runnable() { @Override public void run() { try { SProject project = client.getServiceInterface() .addProject(path.getFileName().toString(), "ifc2x3tc1"); client.bulkCheckin(project.getOid(), path, "Automatic bulk checkin"); } catch (ServerException e) { e.printStackTrace(); } catch (UserException e) { e.printStackTrace(); } catch (PublicInterfaceNotFoundException e) { e.printStackTrace(); }//from ww w. j av a 2s . 
c o m } }); } } if (Files.exists(regularPath)) { DirectoryStream<Path> regularStream = Files.newDirectoryStream(regularPath); for (Path regularFile : regularStream) { executorService.submit(new Runnable() { @Override public void run() { String filename = regularFile.getFileName().toString().toLowerCase(); try { if (filename.endsWith(".ifc") || filename.endsWith(".ifczip")) { String schema = client.getServiceInterface().determineIfcVersion( extractHead(regularFile), filename.toLowerCase().endsWith(".ifczip")); SProject project = client.getServiceInterface().addProject(filename, schema); SDeserializerPluginConfiguration deserializer = client.getServiceInterface() .getSuggestedDeserializerForExtension("ifc", project.getOid()); client.checkinSync(project.getOid(), "Automatic checkin", deserializer.getOid(), false, regularFile); } else { LOGGER.info("Skipping " + filename); } } catch (Exception e) { LOGGER.error(filename, e); } } }); } } executorService.shutdown(); executorService.awaitTermination(24, TimeUnit.HOURS); } } catch (BimServerClientException e) { e.printStackTrace(); } catch (Exception e) { e.printStackTrace(); } }