Example usage for java.nio.file Files newDirectoryStream

Introduction

On this page you can find usage examples for java.nio.file.Files.newDirectoryStream, collected from open-source projects.

Prototype

public static DirectoryStream<Path> newDirectoryStream(Path dir, DirectoryStream.Filter<? super Path> filter)
        throws IOException 

Document

Opens a directory, returning a DirectoryStream to iterate over the entries in the directory.
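
Shown below is a minimal, self-contained sketch of the filter overload (the directory path "." and the class name are placeholders). It prints the regular files in a directory and uses try-with-resources so the underlying directory handle is always released. Several of the examples that follow use the sibling overload newDirectoryStream(Path dir, String glob) instead, which filters entries with a glob pattern such as "*.json".

import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class NewDirectoryStreamExample {
    public static void main(String[] args) throws IOException {
        Path dir = Paths.get("."); // placeholder: directory to list

        // DirectoryStream.Filter is a functional interface whose accept()
        // may throw IOException, so a lambda can serve as the filter.
        DirectoryStream.Filter<Path> regularFilesOnly = entry -> Files.isRegularFile(entry);

        // try-with-resources closes the stream and the OS directory handle.
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, regularFilesOnly)) {
            for (Path entry : stream) {
                System.out.println(entry.getFileName());
            }
        }
    }
}

A DirectoryStream supports only a single iteration and must always be closed; requesting a second iterator throws IllegalStateException.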

Usage

From source file: org.cryptomator.webdav.jackrabbit.resources.EncryptedDir.java

@Override
public DavResourceIterator getMembers() {
    final Path dir = ResourcePathUtils.getPhysicalPath(this);
    // try-with-resources ensures the directory stream is closed
    try (final DirectoryStream<Path> directoryStream = Files.newDirectoryStream(dir,
            cryptor.getPayloadFilesFilter())) {
        final List<DavResource> result = new ArrayList<>();

        for (final Path childPath : directoryStream) {
            final DavResourceLocator childLocator = locator.getFactory().createResourceLocator(
                    locator.getPrefix(), locator.getWorkspacePath(), childPath.toString(), false);
            final DavResource resource = factory.createResource(childLocator, session);
            result.add(resource);
        }
        return new DavResourceIteratorImpl(result);
    } catch (IOException e) {
        LOG.error("Exception during getMembers.", e);
        throw new IORuntimeException(e);
    } catch (DavException e) {
        LOG.error("Exception during getMembers.", e);
        throw new DavRuntimeException(e);
    }
}
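
Here the DirectoryStream.Filter is supplied by the application itself (cryptor.getPayloadFilesFilter()), so the listing only yields the payload files that the cryptor recognizes.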

From source file: org.apache.hadoop.hive.ql.log.TestSlidingFilenameRolloverStrategy.java

@Test
public void testSlidingLogFiles() throws Exception {
    assertEquals("bad props file", PROPERTIES_FILE, System.getProperty("log4j.configurationFile"));

    // Where the log files will be written
    Path logTemplate = FileSystems.getDefault().getPath(FILE_PATTERN);
    String fileName = logTemplate.getFileName().toString();
    Path parent = logTemplate.getParent();
    try {
        Files.createDirectory(parent);
    } catch (FileAlreadyExistsException e) {
        // OK, fall through.
    }

    // Delete any stale log files left around from previous failed tests
    deleteLogFiles(parent, fileName);

    Logger logger = LogManager.getLogger(LineageLogger.class);

    // Does the logger config look correct?
    org.apache.logging.log4j.core.Logger coreLogger = (org.apache.logging.log4j.core.Logger) logger;
    LoggerConfig loggerConfig = coreLogger.get();
    Map<String, Appender> appenders = loggerConfig.getAppenders();

    assertNotNull("sliding appender is missing", appenders.get("sliding"));

    // Do some logging and force log rollover
    int NUM_LOGS = 7;
    logger.debug("Debug Message Logged !!!");
    logger.info("Info Message Logged !!!");

    String errorString = "Error Message Logged ";
    for (int i = 0; i < NUM_LOGS; i++) {
        TimeUnit.MILLISECONDS.sleep(100);
        // log an exception - this produces enough text to force a new logfile
        // (as appender.sliding.policies.size.size=1KB)
        logger.error(errorString + i, new RuntimeException("part of a test"));
    }

    // Check log files look OK; use try-with-resources so the stream is closed
    int count = 0;
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(parent, fileName + ".*")) {
        for (Path path : stream) {
            count++;
            String contents = new String(Files.readAllBytes(path), "UTF-8");
            // There should be one exception message per file
            assertTrue("File " + path + " did not have expected content", contents.contains(errorString));
            String suffix = StringUtils.substringAfterLast(path.toString(), ".");
            // suffix should be a timestamp
            try {
                Long.parseLong(suffix);
            } catch (NumberFormatException e) {
                fail("Suffix " + suffix + " is not a long");
            }
        }
    }
    assertEquals("bad count of log files", NUM_LOGS, count);

    // Check there is no log file without the suffix
    assertFalse("file should not exist:" + logTemplate, Files.exists(logTemplate));

    // Clean up
    deleteLogFiles(parent, fileName);
}
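
This test uses the glob overload newDirectoryStream(Path, String): the pattern fileName + ".*" matches only the rolled-over log files, whose names end in a timestamp suffix that the loop then parses as a long.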

From source file: org.apache.bookkeeper.mledger.offload.OffloaderUtils.java

public static Offloaders searchForOffloaders(String connectorsDirectory) throws IOException {
    Path path = Paths.get(connectorsDirectory).toAbsolutePath();
    log.info("Searching for offloaders in {}", path);

    Offloaders offloaders = new Offloaders();

    if (!path.toFile().exists()) {
        log.warn("Offloaders archive directory not found");
        return offloaders;
    }

    try (DirectoryStream<Path> stream = Files.newDirectoryStream(path, "*.nar")) {
        stream.forEach(archive -> {
            try {
                OffloaderDefinition definition = getOffloaderDefinition(archive.toString());
                log.info("Found offloader {} from {}", definition, archive);

                if (!StringUtils.isEmpty(definition.getOffloaderFactoryClass())) {
                    // Validate offloader factory class to be present and of the right type
                    Pair<NarClassLoader, LedgerOffloaderFactory> offloaderFactoryPair = getOffloaderFactory(
                            archive.toString());
                    if (null != offloaderFactoryPair) {
                        offloaders.getOffloaders().add(offloaderFactoryPair);
                    }
                }
            } catch (Throwable t) {
                log.warn("Failed to load offloader from {}", archive, t);
            }
        });
    }
    log.info("Found and loaded {} offloaders", offloaders.getOffloaders().size());
    return offloaders;
}
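
The "*.nar" glob restricts the stream to NAR archives, and the try-with-resources block guarantees the stream is closed even when an individual offloader fails to load.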

From source file: org.apereo.lap.services.configuration.ConfigurationService.java

@PostConstruct
public void init() throws IOException {
    logger.info("INIT started");

    if (StringUtils.isNotBlank(lapHome)) {
        applicationHomeDirectory = Paths.get(lapHome);
    } else {
        // if not configured specifically, use $PWD/lapHome
        applicationHomeDirectory = Paths.get(System.getProperty("user.dir"), "lapHome");
    }

    logger.info("App Home: " + applicationHomeDirectory);

    if (StringUtils.isNotBlank(dirPipelines)) {
        pipelinesDirectory = Paths.get(dirPipelines);
    } else {
        pipelinesDirectory = applicationHomeDirectory.resolve("pipelines");
    }

    if (StringUtils.isNotBlank(dirInputs)) {
        inputDirectory = Paths.get(dirInputs);
    } else {
        inputDirectory = applicationHomeDirectory.resolve("inputs");
    }

    if (StringUtils.isNotBlank(dirOutputs)) {
        outputDirectory = Paths.get(dirOutputs);
    } else {
        outputDirectory = applicationHomeDirectory.resolve("outputs");
    }

    logger.info("Pipeline Dir: " + pipelinesDirectory);
    logger.info("Inputs Dir: " + inputDirectory);
    logger.info("Outputs Dir: " + outputDirectory);

    if (!Files.isDirectory(outputDirectory)) {
        Files.createDirectories(outputDirectory);
    }

    pipelineConfigs = new ConcurrentHashMap<>();

    // load the external pipeline configs; try-with-resources closes the stream
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(pipelinesDirectory,
            new DirectoryStream.Filter<Path>() {
                @Override
                public boolean accept(Path entry) throws IOException {
                    return Files.isRegularFile(entry);
                }
            })) {
        for (Path entry : stream) {
            PipelineConfig filePLC = buildPipelineConfig(entry);
            if (filePLC != null) {
                pipelineConfigs.put(filePLC.getType(), filePLC);
            }
        }
    }
}
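
The anonymous DirectoryStream.Filter shown here is equivalent to the lambda entry -> Files.isRegularFile(entry), since DirectoryStream.Filter is a functional interface.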

From source file: org.apache.zeppelin.interpreter.launcher.SparkInterpreterLauncher.java

@Override
protected Map<String, String> buildEnvFromProperties(InterpreterLaunchContext context) {
    Map<String, String> env = super.buildEnvFromProperties(context);
    Properties sparkProperties = new Properties();
    String sparkMaster = getSparkMaster(properties);
    for (String key : properties.stringPropertyNames()) {
        if (RemoteInterpreterUtils.isEnvString(key)) {
            env.put(key, properties.getProperty(key));
        }
        if (isSparkConf(key, properties.getProperty(key))) {
            sparkProperties.setProperty(key, toShellFormat(properties.getProperty(key)));
        }
    }

    setupPropertiesForPySpark(sparkProperties);
    setupPropertiesForSparkR(sparkProperties);
    if (isYarnMode() && getDeployMode().equals("cluster")) {
        env.put("ZEPPELIN_SPARK_YARN_CLUSTER", "true");
        sparkProperties.setProperty("spark.yarn.submit.waitAppCompletion", "false");
    }

    StringBuilder sparkConfBuilder = new StringBuilder();
    if (sparkMaster != null) {
        sparkConfBuilder.append(" --master " + sparkMaster);
    }
    if (isYarnMode() && getDeployMode().equals("cluster")) {
        if (sparkProperties.containsKey("spark.files")) {
            sparkProperties.put("spark.files", sparkProperties.getProperty("spark.files") + ","
                    + zConf.getConfDir() + "/log4j_yarn_cluster.properties");
        } else {
            sparkProperties.put("spark.files", zConf.getConfDir() + "/log4j_yarn_cluster.properties");
        }
    }
    for (String name : sparkProperties.stringPropertyNames()) {
        sparkConfBuilder.append(" --conf " + name + "=" + sparkProperties.getProperty(name));
    }
    String useProxyUserEnv = System.getenv("ZEPPELIN_IMPERSONATE_SPARK_PROXY_USER");
    if (context.getOption().isUserImpersonate()
            && (StringUtils.isBlank(useProxyUserEnv) || !useProxyUserEnv.equals("false"))) {
        sparkConfBuilder.append(" --proxy-user " + context.getUserName());
    }
    Path localRepoPath = Paths.get(zConf.getInterpreterLocalRepoPath(), context.getInterpreterSettingId());
    if (isYarnMode() && getDeployMode().equals("cluster") && Files.exists(localRepoPath)
            && Files.isDirectory(localRepoPath)) {
        try (DirectoryStream<Path> localRepoStream = Files.newDirectoryStream(localRepoPath,
                entry -> Files.isRegularFile(entry))) {
            // join the repo jars into the comma-separated list that --jars expects
            StreamSupport.stream(localRepoStream.spliterator(), false)
                    .map(jar -> jar.toAbsolutePath().toString()).reduce((x, y) -> x.concat(",").concat(y))
                    .ifPresent(extraJars -> sparkConfBuilder.append(" --jars ").append(extraJars));
        } catch (IOException e) {
            LOGGER.error("Cannot make a list of additional jars from localRepo: {}", localRepoPath, e);
        }

    }

    env.put("ZEPPELIN_SPARK_CONF", sparkConfBuilder.toString());

    // set these env in the order of
    // 1. interpreter-setting
    // 2. zeppelin-env.sh
    // It is encouraged to set env in interpreter setting, but just for backward compatibility,
    // we also fallback to zeppelin-env.sh if it is not specified in interpreter setting.
    for (String envName : new String[] { "SPARK_HOME", "SPARK_CONF_DIR", "HADOOP_CONF_DIR" }) {
        String envValue = getEnv(envName);
        if (envValue != null) {
            env.put(envName, envValue);
        }
    }

    String keytab = zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_KERBEROS_KEYTAB);
    String principal = zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_KERBEROS_PRINCIPAL);

    if (!StringUtils.isBlank(keytab) && !StringUtils.isBlank(principal)) {
        env.put("ZEPPELIN_SERVER_KERBEROS_KEYTAB", keytab);
        env.put("ZEPPELIN_SERVER_KERBEROS_PRINCIPAL", principal);
        LOGGER.info("Run Spark under secure mode with keytab: " + keytab + ", principal: " + principal);
    } else {
        LOGGER.info("Run Spark under non-secure mode as no keytab and principal is specified");
    }
    LOGGER.debug("buildEnvFromProperties: " + env);
    return env;
}
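
Because DirectoryStream is only an Iterable, the code bridges it into the java.util.stream API through its spliterator, then reduces the absolute paths into the comma-separated value that --jars expects.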

From source file: net.fatlenny.datacitation.service.GitCitationDBService.java

@Override
public List<Query> getQueries() throws CitationDBException {
    checkoutBranch(REF_QUERIES);

    List<Query> queries = new ArrayList<>();
    String workingTreeDir = getWorkingTreeDir();

    try (DirectoryStream<Path> ds = Files.newDirectoryStream(Paths.get(workingTreeDir), "*." + QUERY_ENDING)) {
        for (Path path : ds) {
            queries.add(getQueryFromFile(path));
        }
    } catch (IOException e) {
        throw new CitationDBException("Error reading data files: ", e);
    }

    return queries;
}
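
The glob is built from a constant ("*." + QUERY_ENDING), so a single configuration point controls which files are treated as query definitions.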

From source file: org.cryptomator.webdav.jackrabbit.EncryptedDir.java

@Override
public DavResourceIterator getMembers() {
    final Path dir = ResourcePathUtils.getPhysicalPath(this);
    // try-with-resources ensures the directory stream is closed
    try (final DirectoryStream<Path> directoryStream = Files.newDirectoryStream(dir,
            cryptor.getPayloadFilesFilter())) {
        final List<DavResource> result = new ArrayList<>();

        for (final Path childPath : directoryStream) {
            try {
                final DavResourceLocator childLocator = locator.getFactory().createResourceLocator(
                        locator.getPrefix(), locator.getWorkspacePath(), childPath.toString(), false);
                final DavResource resource = factory.createResource(childLocator, session);
                result.add(resource);
            } catch (DecryptFailedRuntimeException e) {
                LOG.warn("Decryption of resource failed: " + childPath);
                continue;
            }
        }
        return new DavResourceIteratorImpl(result);
    } catch (IOException e) {
        LOG.error("Exception during getMembers.", e);
        throw new IORuntimeException(e);
    } catch (DavException e) {
        LOG.error("Exception during getMembers.", e);
        throw new DavRuntimeException(e);
    }
}
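
This variant of the earlier getMembers() example additionally skips child entries whose names cannot be decrypted instead of failing the whole listing.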

From source file: de.kaixo.mubi.lists.store.MubiListsToElasticSearchLoader.java

public void loadAllFromDir(Path dir) {
    // try-with-resources closes the directory stream after the iteration
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, "*.json")) {
        stream.forEach(this::loadFromJson);
    } catch (IOException e) {
        logger.error("Failed to process " + dir.toString(), e);
    }
}
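
A compact pattern: the glob overload combined with Iterable.forEach and a method reference processes every matching file in a few lines.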

From source file: ee.ria.xroad.common.conf.globalconf.ConfigurationDirectoryV2.java

/**
 * Reloads the configuration directory. Only files that are new or have
 * changed are actually loaded.
 * @throws Exception if an error occurs during reload
 */
public synchronized void reload() throws Exception {
    Map<String, PrivateParametersV2> privateParams = new HashMap<>();
    Map<String, SharedParametersV2> sharedParams = new HashMap<>();

    log.trace("Reloading configuration from {}", path);

    instanceIdentifier = null;

    try (DirectoryStream<Path> stream = Files.newDirectoryStream(path, Files::isDirectory)) {
        for (Path instanceDir : stream) {
            log.trace("Loading parameters from {}", instanceDir);

            loadPrivateParameters(instanceDir, privateParams);
            loadSharedParameters(instanceDir, sharedParams);
        }
    }

    this.privateParameters = privateParams;
    this.sharedParameters = sharedParams;
}
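
The method reference Files::isDirectory acts as the filter, so only the per-instance subdirectories of the configuration root are visited.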

From source file: paketti.AbstractReader.java

protected DirectoryStream<Path> getStream(Path dirPath) throws IOException {
    return Files.newDirectoryStream(dirPath, getMediaFilter(getMediaFileEnds()));
}
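
Since the stream is returned to the caller, it is the caller's responsibility to close it, ideally in a try-with-resources block.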