Example usage for java.nio.file Files.list

List of usage examples for java.nio.file Files.list

Introduction

On this page you can find example usages of java.nio.file.Files.list.

Prototype

public static Stream<Path> list(Path dir) throws IOException 

Document

Return a lazily populated Stream, the elements of which are the entries in the directory.
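
A minimal sketch of typical usage (the "/tmp" path below is only an illustration; substitute a directory that exists on your system). The returned stream holds an open handle to the directory, so it is usually wrapped in try-with-resources:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Stream;

public class FilesListExample {
    public static void main(String[] args) throws IOException {
        Path dir = Paths.get("/tmp"); // illustrative path, not taken from the examples below
        // try-with-resources closes the underlying directory handle when the stream is done
        try (Stream<Path> entries = Files.list(dir)) {
            entries.map(Path::getFileName).forEach(System.out::println);
        }
    }
}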

Usage

From source file:Main.java

public static void main(String[] args) throws Exception {
    Files.list(Paths.get("c:/Java_Dev")).sorted().forEach(System.out::println);
}

From source file:eu.crydee.stanfordcorenlp.Tokenizer.java

/**
 * Wrapper around Stanford CoreNLP to tokenize text.
 *
 * Give it an input dir of text files with --input-dir and it'll output
 * tokenized versions, one sentence per line with space-separated words, to
 * --output-dir (defaults to out/).
 *
 * @param args CLI args. Example: --input-dir my-input --output-dir
 * my-output.
 */
public static void main(String[] args) {
    ArgumentParser parser = ArgumentParsers.newArgumentParser("stanford-corenlp-tokenizer-wrapper")
            .description("Converts Mediawiki dumps to text.");
    parser.addArgument("-i", "--input-dir").required(true).help("Path of the input text files directory.");
    parser.addArgument("-o", "--output-dir").help("Path of the output text files directory.").setDefault("out");
    Params params = new Params();
    try {
        parser.parseArgs(args, params);
    } catch (ArgumentParserException ex) {
        System.err.println("Could not parse arguments: " + ex.getMessage());
        System.exit(1);
    }
    Tokenizer tokenizer = new Tokenizer();

    try {
        Files.list(Paths.get(params.inDirPath)).filter(Files::isRegularFile).map(Path::toFile).map(f -> {
            try {
                return Pair.of(f.getName(), FileUtils.readFileToString(f, StandardCharsets.UTF_8));
            } catch (IOException ex) {
                System.err.println("Could not read input text file: " + ex.getLocalizedMessage());
                throw new UncheckedIOException(ex);
            }
        }).forEach(p -> {
            String text = tokenizer.tokenizeAndSentenceSplit(p.getRight());
            try {
                FileUtils.writeStringToFile(Paths.get(params.outDirpath, p.getLeft()).toFile(), text,
                        StandardCharsets.UTF_8);
            } catch (IOException ex) {
                System.err.println("Could not write output text file: " + ex.getLocalizedMessage());
            }
        });
    } catch (IOException ex) {
        System.err.println("Could not read from input directory: " + ex.getLocalizedMessage());
    }
}

From source file:com.github.ffremont.microservices.springboot.node.tasks.AbstractInstallTest.java

private void remove(Path path) throws IOException {
    Files.list(path).forEach(item -> {
        try {
            if (Files.isDirectory(item)) {
                this.remove(item);
            }

            Files.delete(item);
        } catch (IOException e) {
            LOG.error("Impossible de nettoyer", e);
        }
    });
}

From source file:pl.p.lodz.ftims.server.logic.ChallengeServiceTest.java

@AfterClass
public static void deleteImgDir() throws IOException {
    Stream<Path> stream = Files.list(Paths.get(PHOTOS_DIR));
    stream.forEach((Path p) -> {
        try {
            Files.delete(p);
        } catch (IOException ex) {
            Logger.getLogger(ChallengeServiceTest.class.getName()).log(Level.SEVERE, null, ex);
        }
    });
    Files.deleteIfExists(Paths.get(PHOTOS_DIR));
}

From source file:com.github.ansell.shp.SHPDump.java

public static void main(String... args) throws Exception {
    final OptionParser parser = new OptionParser();

    final OptionSpec<Void> help = parser.accepts("help").forHelp();
    final OptionSpec<File> input = parser.accepts("input").withRequiredArg().ofType(File.class).required()
            .describedAs("The input SHP file");
    final OptionSpec<File> output = parser.accepts("output").withRequiredArg().ofType(File.class).required()
            .describedAs("The output directory to use for debugging files");
    final OptionSpec<String> outputPrefix = parser.accepts("prefix").withRequiredArg().ofType(String.class)
            .defaultsTo("shp-debug").describedAs("The output prefix to use for debugging files");
    final OptionSpec<File> outputMappingTemplate = parser.accepts("output-mapping").withRequiredArg()
            .ofType(File.class).describedAs("The output mapping template file if it needs to be generated.");
    final OptionSpec<Integer> resolution = parser.accepts("resolution").withRequiredArg().ofType(Integer.class)
            .defaultsTo(2048).describedAs("The output image file resolution");
    final OptionSpec<String> format = parser.accepts("format").withRequiredArg().ofType(String.class)
            .defaultsTo("png").describedAs("The output image format");
    final OptionSpec<String> removeIfEmpty = parser.accepts("remove-if-empty").withRequiredArg()
            .ofType(String.class).describedAs(
                    "The name of an attribute to remove if its value is empty before outputting the resulting shapefile. Use multiple times to specify multiple fields to check");

    OptionSet options = null;

    try {
        options = parser.parse(args);
    } catch (final OptionException e) {
        System.out.println(e.getMessage());
        parser.printHelpOn(System.out);
        throw e;
    }

    if (options.has(help)) {
        parser.printHelpOn(System.out);
        return;
    }

    final Path inputPath = input.value(options).toPath();
    if (!Files.exists(inputPath)) {
        throw new FileNotFoundException("Could not find input SHP file: " + inputPath.toString());
    }

    final Path outputPath = output.value(options).toPath();
    if (!Files.exists(outputPath)) {
        throw new FileNotFoundException("Output directory does not exist: " + outputPath.toString());
    }

    final Path outputMappingPath = options.has(outputMappingTemplate)
            ? outputMappingTemplate.value(options).toPath()
            : null;
    if (options.has(outputMappingTemplate) && Files.exists(outputMappingPath)) {
        throw new FileNotFoundException(
                "Output mapping template file already exists: " + outputMappingPath.toString());
    }

    final Set<String> filterFields = ConcurrentHashMap.newKeySet();
    if (options.has(removeIfEmpty)) {
        for (String nextFilterField : removeIfEmpty.values(options)) {
            System.out.println("Will filter field if empty value found: " + nextFilterField);
            filterFields.add(nextFilterField);
        }
    }

    if (!filterFields.isEmpty()) {
        System.out.println("Full set of filter fields: " + filterFields);
    }

    final String prefix = outputPrefix.value(options);

    FileDataStore store = FileDataStoreFinder.getDataStore(inputPath.toFile());

    if (store == null) {
        throw new RuntimeException("Could not read the given input as an ESRI Shapefile: "
                + inputPath.toAbsolutePath().toString());
    }

    for (String typeName : new LinkedHashSet<>(Arrays.asList(store.getTypeNames()))) {
        System.out.println("");
        System.out.println("Type: " + typeName);
        SimpleFeatureSource featureSource = store.getFeatureSource(typeName);
        SimpleFeatureType schema = featureSource.getSchema();

        Name outputSchemaName = new NameImpl(schema.getName().getNamespaceURI(),
                schema.getName().getLocalPart().replace(" ", "").replace("%20", ""));
        System.out.println("Replacing name on schema: " + schema.getName() + " with " + outputSchemaName);
        SimpleFeatureType outputSchema = SHPUtils.changeSchemaName(schema, outputSchemaName);

        List<String> attributeList = new ArrayList<>();
        for (AttributeDescriptor attribute : schema.getAttributeDescriptors()) {
            System.out.println("Attribute: " + attribute.getName().toString());
            attributeList.add(attribute.getName().toString());
        }
        CsvSchema csvSchema = CSVUtil.buildSchema(attributeList);

        SimpleFeatureCollection collection = featureSource.getFeatures();
        int featureCount = 0;
        Path nextCSVFile = outputPath.resolve(prefix + ".csv");
        Path nextSummaryCSVFile = outputPath
                .resolve(prefix + "-" + outputSchema.getTypeName() + "-Summary.csv");
        List<SimpleFeature> outputFeatureList = new CopyOnWriteArrayList<>();

        try (SimpleFeatureIterator iterator = collection.features();
                Writer bufferedWriter = Files.newBufferedWriter(nextCSVFile, StandardCharsets.UTF_8,
                        StandardOpenOption.CREATE_NEW);
                SequenceWriter csv = CSVUtil.newCSVWriter(bufferedWriter, csvSchema);) {
            List<String> nextLine = new ArrayList<>();
            while (iterator.hasNext()) {
                SimpleFeature feature = iterator.next();
                featureCount++;
                if (featureCount <= 2) {
                    System.out.println("");
                    System.out.println(feature.getIdentifier());
                } else if (featureCount % 100 == 0) {
                    System.out.print(".");
                }
                boolean filterThisFeature = false;
                for (AttributeDescriptor attribute : schema.getAttributeDescriptors()) {
                    String featureString = Optional.ofNullable(feature.getAttribute(attribute.getName()))
                            .orElse("").toString();
                    nextLine.add(featureString);
                    if (filterFields.contains(attribute.getName().toString())
                            && featureString.trim().isEmpty()) {
                        filterThisFeature = true;
                    }
                    if (featureString.length() > 100) {
                        featureString = featureString.substring(0, 100) + "...";
                    }
                    if (featureCount <= 2) {
                        System.out.print(attribute.getName() + "=");
                        System.out.println(featureString);
                    }
                }
                if (!filterThisFeature) {
                    outputFeatureList.add(SHPUtils.changeSchemaName(feature, outputSchema));
                    csv.write(nextLine);
                }
                nextLine.clear();
            }
        }
        try (Reader csvReader = Files.newBufferedReader(nextCSVFile, StandardCharsets.UTF_8);
                Writer summaryOutput = Files.newBufferedWriter(nextSummaryCSVFile, StandardCharsets.UTF_8,
                        StandardOpenOption.CREATE_NEW);
                final Writer mappingWriter = options.has(outputMappingTemplate)
                        ? Files.newBufferedWriter(outputMappingPath)
                        : NullWriter.NULL_WRITER) {
            CSVSummariser.runSummarise(csvReader, summaryOutput, mappingWriter,
                    CSVSummariser.DEFAULT_SAMPLE_COUNT, false);
        }
        if (featureCount > 100) {
            System.out.println("");
        }
        System.out.println("");
        System.out.println("Feature count: " + featureCount);

        SimpleFeatureCollection outputCollection = new ListFeatureCollection(outputSchema, outputFeatureList);
        Path outputShapefilePath = outputPath.resolve(prefix + "-" + outputSchema.getTypeName() + "-dump");
        if (!Files.exists(outputShapefilePath)) {
            Files.createDirectory(outputShapefilePath);
        }
        SHPUtils.writeShapefile(outputCollection, outputShapefilePath);

        // Create ZIP file from the contents to keep the subfiles together
        Path outputShapefileZipPath = outputPath
                .resolve(prefix + "-" + outputSchema.getTypeName() + "-dump.zip");
        try (final OutputStream out = Files.newOutputStream(outputShapefileZipPath,
                StandardOpenOption.CREATE_NEW);
                final ZipOutputStream zip = new ZipOutputStream(out, StandardCharsets.UTF_8);) {
            Files.list(outputShapefilePath).forEachOrdered(Unchecked.consumer(e -> {
                zip.putNextEntry(new ZipEntry(e.getFileName().toString()));
                Files.copy(e, zip);
                zip.closeEntry();
            }));
        }

        try (final OutputStream outputStream = Files.newOutputStream(
                outputPath.resolve(prefix + "." + format.value(options)), StandardOpenOption.CREATE_NEW);) {
            MapContent map = new MapContent();
            map.setTitle(prefix + "-" + outputSchema.getTypeName());
            Style style = SLD.createSimpleStyle(featureSource.getSchema());
            Layer layer = new FeatureLayer(new CollectionFeatureSource(outputCollection), style);
            map.addLayer(layer);
            SHPUtils.renderImage(map, outputStream, resolution.value(options), format.value(options));
        }
    }
}

From source file:com.github.ffremont.microservices.springboot.node.tasks.UninstallTask.java

/**
 * Recursively removes the contents of the given directory, then deletes the directory itself.
 *
 * @param path the directory to remove
 * @throws IOException
 */
private void remove(Path path) throws IOException {
    Files.list(path).forEach((Path item) -> {
        try {
            if (Files.isDirectory(item)) {
                this.remove(item);
            } else {
                Files.delete(item);
            }
        } catch (IOException e) {
            LOG.error("Impossible de supprmimer les lments dans " + item.toAbsolutePath(), e);
        }
    });

    Files.delete(path);
}

From source file:org.esa.s2tbx.dataio.openjpeg.OpenJPEGActivator.java

private static void fixUpPermissions(Path destPath) throws IOException {
    Stream<Path> files = Files.list(destPath);
    files.forEach(path -> {
        if (Files.isDirectory(path)) {
            try {
                fixUpPermissions(path);
            } catch (IOException e) {
                SystemUtils.LOG.severe("OpenJPEG configuration error: failed to fix permissions on " + path);
            }
        } else {
            setExecutablePermissions(path);
        }
    });
}

From source file:objective.taskboard.rules.CleanupDataFolderRule.java

@Override
protected void before() throws Throwable {
    if (Files.exists(folder))
        beforeList = Files.list(folder).collect(Collectors.toList());
}

From source file:org.ng200.openolympus.FileAccess.java

public static <T> T actOnChildren(Path path, Function<Stream<Path>, T> toApply) throws IOException {
    try (Stream<Path> children = Files.list(path)) {
        return toApply.apply(children);
    }
}

From source file:org.silverpeas.core.util.file.FileFolderManager.java

/**
 * Gets all the folders (and only the folders, not the files) that are directly inside the
 * specified directory. Throws a {@link UtilException} if the
 * specified path doesn't denote a directory or if the listing of all of its directories fails.
 * @param path the path of the directory.
 * @return a collection of folders.
 */
public static Collection<File> getAllSubFolder(final String path) {
    final List<File> result;
    final Path directory = Paths.get(path);
    if (directory.toFile().isDirectory()) {
        try (final Stream<Path> folders = Files.list(directory)) {
            result = folders.filter(p -> p.toFile().isDirectory()).map(Path::toFile)
                    .collect(Collectors.toList());
        } catch (IOException e) {
            throw new UtilException(e);
        }
    } else {
        throw new UtilException(path + NOT_A_DIRECTORY_MSG);
    }
    return result;
}