List of usage examples for java.nio.file.Files.list
public static Stream<Path> list(Path dir) throws IOException
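Files.list returns a lazily populated Stream of the entries of a single directory (it does not recurse into subdirectories). The stream encapsulates an open DirectoryStream, so it should be closed, typically with try-with-resources, as most of the examples below do. A minimal usage sketch follows; the directory path and the filter are illustrative only, not taken from any of the examples below.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ListDirectoryExample {
    public static void main(String[] args) throws IOException {
        Path dir = Paths.get("."); // illustrative: list the current directory
        // close the stream to release the underlying directory handle
        try (Stream<Path> entries = Files.list(dir)) {
            List<Path> regularFiles = entries
                    .filter(Files::isRegularFile) // keep plain files, skip subdirectories
                    .collect(Collectors.toList());
            regularFiles.forEach(System.out::println);
        }
    }
}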
From source file:org.talend.dataprep.folder.store.file.FileSystemFolderRepository.java
@Override
public Iterable<FolderEntry> entries(String folderId, FolderContentType contentType) {
    FolderPath folderPath = fromId(folderId);
    if (folderPath == null) {
        throw new TDPException(FOLDER_DOES_NOT_EXIST, build().put("id", folderId));
    }
    final Path path = pathsConverter.toPath(folderPath);
    if (Files.notExists(path)) {
        return emptyList();
    }
    try {
        try (Stream<Path> paths = Files.list(path)) {
            return paths.filter(pathFound -> !Files.isDirectory(pathFound)) //
                    .map(FileSystemUtils::toFolderEntry) //
                    .filter(entry -> Objects.equals(contentType, entry.getContentType())) //
                    .collect(toList());
        }
    } catch (IOException e) {
        throw new TDPException(UNABLE_TO_LIST_FOLDER_ENTRIES, e,
                build().put("path", path).put("type", contentType));
    }
}
From source file:org.apache.nifi.processors.hadoop.MoveHDFSTest.java
@Test
public void testEmptyInputDirectory() throws IOException {
    MoveHDFS proc = new TestableMoveHDFS(kerberosProperties);
    TestRunner runner = TestRunners.newTestRunner(proc);
    Files.createDirectories(Paths.get(INPUT_DIRECTORY));
    runner.setProperty(MoveHDFS.INPUT_DIRECTORY_OR_FILE, INPUT_DIRECTORY);
    runner.setProperty(MoveHDFS.OUTPUT_DIRECTORY, OUTPUT_DIRECTORY);
    runner.enqueue(new byte[0]);
    Assert.assertEquals(0, Files.list(Paths.get(INPUT_DIRECTORY)).count());
    runner.run();
    List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(MoveHDFS.REL_SUCCESS);
    runner.assertAllFlowFilesTransferred(MoveHDFS.REL_SUCCESS);
    Assert.assertEquals(0, flowFiles.size());
}
From source file:org.mitre.mpf.wfm.service.component.StartupComponentRegistrationServiceImpl.java
private static List<Path> listDirContent(Path dir) {
    if (!Files.isDirectory(dir)) {
        return Collections.emptyList();
    }
    try (Stream<Path> dirChildren = Files.list(dir)) {
        return dirChildren.collect(toList());
    } catch (IOException e) {
        throw new UncheckedIOException("Failed to list contents of: " + dir, e);
    }
}
From source file:com.qwazr.library.archiver.ArchiverTool.java
public void extract_dir(final Path sourceDir, final String sourceExtension, final Path destDir,
        final Boolean logErrorAndContinue) throws IOException, ArchiveException {
    if (!Files.exists(sourceDir))
        throw new FileNotFoundException("The source directory does not exist: " + sourceDir.toAbsolutePath());
    if (!Files.exists(destDir))
        throw new FileNotFoundException(
                "The destination directory does not exist: " + destDir.toAbsolutePath());
    final Path[] sourceFiles;
    try (final Stream<Path> stream = Files.list(sourceDir)) {
        sourceFiles = stream.filter(p -> Files.isRegularFile(p)).toArray(Path[]::new);
    }
    if (sourceFiles == null)
        return;
    for (final Path sourceFile : sourceFiles) {
        final String ext = FilenameUtils.getExtension(sourceFile.getFileName().toString());
        if (!sourceExtension.equals(ext))
            continue;
        try {
            extract(sourceFile, destDir);
        } catch (IOException | ArchiveException e) {
            if (logErrorAndContinue != null && logErrorAndContinue)
                LOGGER.log(Level.SEVERE, e, e::getMessage);
            else
                throw e;
        }
    }
}
From source file:org.sejda.core.service.TaskTestContext.java
/**
 * Applies the given consumer to every generated output
 *
 * @param consumer
 * @return
 * @throws IOException
 */
public TaskTestContext forEachRawOutput(Consumer<Path> consumer) throws IOException {
    requireMultipleOutputs();
    Files.list(fileOutput.toPath()).forEach(consumer);
    return this;
}
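Note that this example consumes the stream returned by Files.list without closing it, so the underlying directory handle is released only when the stream is garbage collected. A sketch of the same method using try-with-resources is shown below; the names are reused from the example above and the variant is not taken from the Sejda sources.

public TaskTestContext forEachRawOutput(Consumer<Path> consumer) throws IOException {
    requireMultipleOutputs();
    // close the directory stream deterministically instead of relying on GC
    try (Stream<Path> outputs = Files.list(fileOutput.toPath())) {
        outputs.forEach(consumer);
    }
    return this;
}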
From source file:org.jlab.clara.std.services.DataManager.java
private void saveOutputFile(FilePaths files, EngineData output) {
    Path outputPath = FileUtils.getParent(files.outputFile);
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    try {
        FileUtils.createDirectories(outputPath);
        CommandLine cmdLine = new CommandLine("mv");
        // cmdLine.addArgument(files.stagedOutputFile.toString());
        // cmdLine.addArgument(files.outputFile.toString());
        // modified 09.12.18. Stage back multiple output files. vg
        // note: the result of startsWith is discarded here, so every file in the
        // stage directory is appended to the command line
        Files.list(directoryPaths.stagePath).forEach(name -> {
            name.startsWith(files.stagedOutputFile.toString());
            cmdLine.addArgument(name.toString());
        });
        cmdLine.addArgument(outputPath.toString());
        // vg
        DefaultExecutor executor = new DefaultExecutor();
        PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
        executor.setStreamHandler(streamHandler);
        executor.execute(cmdLine);

        System.out.printf("%s service: output file '%s' saved to '%s'%n",
                NAME, files.stagedOutputFile, outputPath);

        returnFilePaths(output, files);
    } catch (ExecuteException e) {
        ServiceUtils.setError(output, "could not complete request: " + outputStream.toString().trim());
    } catch (IOException e) {
        ServiceUtils.setError(output, "could not complete request: " + e.getMessage());
    }
}
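Since the startsWith result above is discarded, a prefix filter was presumably intended. The fragment below sketches that assumed intent (it is not code from the CLARA project); it would replace the Files.list call inside the same try block, and it compares string prefixes rather than whole path elements, which is an assumption about how the staged file names relate to the output file name.

// assumed intent: only pass staged files whose name starts with the staged output file name
try (Stream<Path> staged = Files.list(directoryPaths.stagePath)) {
    staged.filter(name -> name.toString().startsWith(files.stagedOutputFile.toString()))
          .forEach(name -> cmdLine.addArgument(name.toString()));
}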
From source file:org.jumpmind.metl.core.runtime.component.DataDiff.java
protected void calculateDiff(ISendMessageCallback callback) {
    Map<ModelEntity, String> changeSqls = new HashMap<>();
    Map<ModelEntity, String> addSqls = new HashMap<>();
    Map<ModelEntity, String> delSqls = new HashMap<>();
    Component component = context.getFlowStep().getComponent();
    for (ModelEntity entity : entities) {
        StringBuilder addSql = new StringBuilder("select ");
        StringBuilder chgSql = new StringBuilder(addSql);
        StringBuilder delSql = new StringBuilder(addSql);
        appendColumns(addSql, "curr.", entity);
        appendColumns(delSql, "orig.", entity);
        appendColumns(chgSql, "curr.", entity);
        addSql.append(" from " + entity.getName() + "_2 curr left join " + entity.getName() + "_1 orig on ");
        delSql.append(" from " + entity.getName() + "_1 orig left join " + entity.getName() + "_2 curr on ");
        chgSql.append(" from " + entity.getName() + "_1 orig join " + entity.getName() + "_2 curr on ");
        boolean secondPk = false;
        for (ModelAttribute attribute : entity.getModelAttributes()) {
            if (attribute.isPk()) {
                if (secondPk) {
                    addSql.append(" and ");
                    delSql.append(" and ");
                    chgSql.append(" and ");
                }
                addSql.append("curr.").append(attribute.getName()).append("=").append("orig.")
                        .append(attribute.getName());
                delSql.append("curr.").append(attribute.getName()).append("=").append("orig.")
                        .append(attribute.getName());
                chgSql.append("curr.").append(attribute.getName()).append("=").append("orig.")
                        .append(attribute.getName());
                secondPk = true;
            }
        }
        addSql.append(" where ");
        delSql.append(" where ");
        chgSql.append(" where ");
        secondPk = false;
        boolean secondCol = false;
        for (ModelAttribute attribute : entity.getModelAttributes()) {
            if (attribute.isPk()) {
                if (secondPk) {
                    addSql.append(" or ");
                    delSql.append(" or ");
                }
                addSql.append("orig.").append(attribute.getName()).append(" is null");
                delSql.append("curr.").append(attribute.getName()).append(" is null");
                secondPk = true;
            } else {
                ComponentAttributeSetting matchColumnSetting = component
                        .getSingleAttributeSetting(attribute.getId(), DataDiff.ATTRIBUTE_COMPARE_ENABLED);
                boolean matchColumn = matchColumnSetting != null
                        ? Boolean.parseBoolean(matchColumnSetting.getValue()) : true;
                if (matchColumn) {
                    if (secondCol) {
                        chgSql.append(" or ");
                    }
                    chgSql.append("curr.").append(attribute.getName()).append(" != ").append("orig.")
                            .append(attribute.getName());
                    chgSql.append(" or ");
                    chgSql.append("curr.").append(attribute.getName()).append(" is null and ").append("orig.")
                            .append(attribute.getName()).append(" is not null ");
                    chgSql.append(" or ");
                    chgSql.append("curr.").append(attribute.getName()).append(" is not null and ")
                            .append("orig.").append(attribute.getName()).append(" is null ");
                    secondCol = true;
                }
            }
        }
        // we only want to do a change compare if this entity has
        // cols to compare other than the primary key.
        if (!entity.hasOnlyPrimaryKeys() && secondCol) {
            changeSqls.put(entity, chgSql.toString());
            log(LogLevel.INFO, "Generated diff sql for CHG: %s", chgSql);
        }
        log(LogLevel.INFO, "Generated diff sql for ADD: %s", addSql);
        log(LogLevel.INFO, "Generated diff sql for DEL: %s", delSql);
        addSqls.put(entity, addSql.toString());
        delSqls.put(entity, delSql.toString());
    }
    RdbmsReader reader = new RdbmsReader();
    reader.setDataSource(databasePlatform.getDataSource());
    reader.setContext(context);
    reader.setComponentDefinition(componentDefinition);
    reader.setRowsPerMessage(rowsPerMessage);
    reader.setThreadNumber(threadNumber);
    for (ModelEntity entity : entities) {
        ComponentEntitySetting add = component.getSingleEntitySetting(entity.getId(),
                DataDiff.ENTITY_ADD_ENABLED);
        ComponentEntitySetting chg = component.getSingleEntitySetting(entity.getId(),
                DataDiff.ENTITY_CHG_ENABLED);
        boolean addEnabled = add != null ? Boolean.parseBoolean(add.getValue()) : true;
        boolean chgEnabled = chg != null ? Boolean.parseBoolean(chg.getValue()) : true;
        if (addEnabled) {
            reader.setSql(addSqls.get(entity));
            reader.setEntityChangeType(ChangeType.ADD);
            reader.handle(new ControlMessage(this.context.getFlowStep().getId()), callback, false);
            info("Sent %d ADD records for %s", reader.getRowReadDuringHandle(), entity.getName());
        }
        if (chgEnabled && changeSqls.get(entity) != null) {
            reader.setSql(changeSqls.get(entity));
            reader.setEntityChangeType(ChangeType.CHG);
            reader.handle(new ControlMessage(this.context.getFlowStep().getId()), callback, false);
            info("Sent %d CHG records for %s", reader.getRowReadDuringHandle(), entity.getName());
        }
    }
    for (int i = entities.size() - 1; i >= 0; i--) {
        ModelEntity entity = entities.get(i);
        ComponentEntitySetting del = component.getSingleEntitySetting(entity.getId(),
                DataDiff.ENTITY_DEL_ENABLED);
        boolean delEnabled = del != null ? Boolean.parseBoolean(del.getValue()) : true;
        if (delEnabled) {
            reader.setSql(delSqls.get(entity));
            reader.setEntityChangeType(ChangeType.DEL);
            reader.handle(new ControlMessage(this.context.getFlowStep().getId()), callback, false);
            info("Sent %d DEL records for %s", reader.getRowReadDuringHandle(), entity.getName());
        }
    }
    ResettableBasicDataSource ds = databasePlatform.getDataSource();
    ds.close();
    if (!inMemoryCompare) {
        try {
            Files.list(Paths.get(System.getProperty("h2.baseDir")))
                    .filter(path -> path.toFile().getName().startsWith(databaseName))
                    .forEach(path -> deleteDatabaseFile(path.toFile()));
        } catch (IOException e) {
            log.warn("Failed to delete file", e);
        }
    }
    databasePlatform = null;
    databaseName = null;
    databaseWriter = null;
}
From source file:org.corehunter.services.simple.SimpleCoreHunterRunServices.java
private void initialise() throws IOException {
    Path resultsPath = Paths.get(getPath().toString(), RESULTS_PATH);
    if (!Files.exists(resultsPath)) {
        Files.createDirectories(resultsPath);
    }
    Iterator<Path> iterator = Files.list(resultsPath).iterator();
    while (iterator.hasNext()) {
        loadResult(iterator.next());
    }
}
From source file:org.eclipse.cdt.arduino.core.internal.board.ArduinoManager.java
private synchronized void initPackages() throws CoreException {
    init();
    if (packages == null) {
        packages = new HashMap<>();
        try {
            Files.list(ArduinoPreferences.getArduinoHome())
                    .filter(path -> path.getFileName().toString().startsWith("package_")) //$NON-NLS-1$
                    .forEach(path -> {
                        try (Reader reader = new FileReader(path.toFile())) {
                            PackageIndex index = new Gson().fromJson(reader, PackageIndex.class);
                            for (ArduinoPackage pkg : index.getPackages()) {
                                ArduinoPackage p = packages.get(pkg.getName());
                                if (p == null) {
                                    pkg.init();
                                    packages.put(pkg.getName(), pkg);
                                } else {
                                    p.merge(pkg);
                                }
                            }
                        } catch (IOException e) {
                            Activator.log(e);
                        }
                    });
        } catch (IOException e) {
            throw Activator.coreException(e);
        }
    }
}
From source file:com.asakusafw.workflow.executor.TaskExecutors.java
private static Set<Path> findLibraries(Path directory) {
    if (Files.isDirectory(directory) == false) {
        return Collections.emptySet();
    }
    try {
        return Files.list(directory)
                .filter(it -> Optional.of(it.getFileName())
                        .map(name -> name.toString().endsWith(EXTENSION_LIBRARY)).orElse(false))
                .collect(Collectors.toSet());
    } catch (IOException e) {
        LOG.warn("failed to list directory entries: {}", directory, e);
        return Collections.emptySet();
    }
}