Example usage for java.nio.file Path resolve

List of usage examples for java.nio.file Path resolve

Introduction

On this page you can find example usage for java.nio.file Path resolve.

Prototype

default Path resolve(String other) 

Document

Converts a given path string to a Path and resolves it against this Path in exactly the manner specified by the resolve(Path) method.
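
As a quick illustration of that contract, here is a minimal sketch (the class name ResolveExample and the paths are arbitrary; the printed results assume a Unix-style default filesystem, so separators and roots differ on Windows):

import java.nio.file.Path;
import java.nio.file.Paths;

public class ResolveExample {
    public static void main(String[] args) {
        Path base = Paths.get("/opt/app");

        // A relative argument is appended to this path.
        System.out.println(base.resolve("conf/app.yml")); // /opt/app/conf/app.yml

        // An absolute argument is returned as-is.
        System.out.println(base.resolve("/etc/hosts"));   // /etc/hosts

        // An empty string trivially returns this path.
        System.out.println(base.resolve(""));             // /opt/app
    }
}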

Usage

From source file: ddf.test.itests.catalog.TestCatalog.java

@Test
public void testMetacardDefinitionJsonFile() throws Exception {
    getServiceManager().startFeature(true, "catalog-core-validator");
    Path definitionsDir = Paths.get(System.getProperty("ddf.home"), "etc/definitions");
    definitionsDir = Files.createDirectories(definitionsDir);
    definitionsDir.toFile().deleteOnExit();
    Path tmpFile = definitionsDir.resolve("definitions.json");
    tmpFile.toFile().deleteOnExit();
    Files.copy(getClass().getClassLoader().getResourceAsStream("definitions.json"), tmpFile);

    expect("Service to be available: " + MetacardType.class.getName()).within(10, TimeUnit.SECONDS).until(
            () -> getServiceManager().getServiceReferences(MetacardType.class, "(name=new.metacard.type)"),
            not(empty()));

    String ddfMetacardXml = IOUtils.toString(getClass().getClassLoader().getResourceAsStream("metacard1.xml"),
            UTF_8.name());

    String modifiedMetacardXml = ddfMetacardXml.replaceFirst("ddf\\.metacard", "new.metacard.type")
            .replaceFirst("resource-uri", "new-attribute-required-2");
    String id = ingest(modifiedMetacardXml, "text/xml");
    configureShowInvalidMetacards("true");
    try {

        String newMetacardXpath = String.format("/metacards/metacard[@id=\"%s\"]", id);

        executeOpenSearch("xml", "q=*").log().all().assertThat().body(hasXPath(newMetacardXpath))
                .body(hasXPath(newMetacardXpath + "/type", is("new.metacard.type")))
                .body(hasXPath("count(" + newMetacardXpath + "/string[@name=\"validation-errors\"]/value)",
                        is("2")))
                .body(hasXPath(newMetacardXpath
                        + "/string[@name=\"validation-errors\"]/value[text()=\"point-of-contact is required\"]"))
                .body(hasXPath(newMetacardXpath
                        + "/string[@name=\"validation-errors\"]/value[text()=\"new-attribute-required-1 is required\"]"))
                .body(hasXPath(newMetacardXpath + "/string[@name=\"new-attribute-required-2\"]/value",
                        is("\" + uri + \"")));

    } finally {
        deleteMetacard(id);
        getServiceManager().stopFeature(true, "catalog-core-validator");
        configureShowInvalidMetacards("false");
    }
}

From source file: io.github.alechenninger.monarch.Main.java

public void run(String[] args) throws ParseException, IOException {
    try {
        CliInputs cliInputs = CliInputs.parse(args);

        if (cliInputs.helpRequested()) {
            System.out.print(cliInputs.helpMessage());
            return;
        }

        MonarchOptions options = getOptionsFromInputsAndConfigFiles(cliInputs, fileSystem, parsers);

        Path outputDir = options.outputDir().orElseThrow(missingOptionException("output directory"));
        Path dataDir = options.dataDir().orElseThrow(missingOptionException("data directory"));
        Hierarchy hierarchy = options.hierarchy().orElseThrow(missingOptionException("hierarchy"));
        String target = options.target().orElseThrow(missingOptionException("target"));
        Iterable<Change> changes = options.changes();
        Set<String> mergeKeys = options.mergeKeys();

        if (!changes.iterator().hasNext()) {
            System.out.println("No changes provided; formatting target.");
        }

        List<String> affectedSources = hierarchy.hierarchyOf(target).orElseThrow(
                () -> new IllegalArgumentException("Target source not found in hierarchy: " + options.target()))
                .descendants();

        Map<String, Map<String, Object>> currentData = readDataForHierarchy(dataDir, hierarchy);

        Map<String, Map<String, Object>> result = monarch.generateSources(hierarchy, changes, target,
                currentData, mergeKeys);

        for (Map.Entry<String, Map<String, Object>> sourceToData : result.entrySet()) {
            String source = sourceToData.getKey();

            if (!affectedSources.contains(source)) {
                continue;
            }

            Path sourcePath = outputDir.resolve(source);
            ensureParentDirectories(sourcePath);

            SortedMap<String, Object> sorted = new TreeMap<>(sourceToData.getValue());

            if (sorted.isEmpty()) {
                Files.write(sourcePath, new byte[] {});
            } else {
                // Use try-with-resources so the writer is flushed and closed.
                try (Writer writer = Files.newBufferedWriter(sourcePath, UTF_8)) {
                    yaml.dump(sorted, writer);
                }
            }
        }
    } catch (MonarchException | ParseException e) {
        e.printStackTrace();
        System.out.print(CliInputs.parse(new String[0]).helpMessage());
    }
}

From source file: fr.pilato.elasticsearch.crawler.fs.test.integration.FsCrawlerImplAllParametersIT.java

/**
 * We assume that each test has its own set of files. Even if we duplicate them, that makes the code
 * more readable.
 * The temp folder used as a root is automatically cleaned after the test, so we don't have to worry
 * about it.
 */
@Before
public void copyTestResources() throws IOException, URISyntaxException {
    Path testResourceTarget = rootTmpDir.resolve("resources");
    if (Files.notExists(testResourceTarget)) {
        Files.createDirectory(testResourceTarget);
    }

    String currentTestName = getCurrentTestName();
    // We copy files from the src dir to the temp dir
    staticLogger.info("  --> Launching test [{}]", currentTestName);
    String url = getUrl("samples", currentTestName);
    Path from = Paths.get(url);
    currentTestResourceDir = testResourceTarget.resolve(currentTestName);

    if (Files.exists(from)) {
        staticLogger.debug("  --> Copying test resources from [{}]", from);
    } else {
        staticLogger.debug("  --> Copying test resources from [{}]", DEFAULT_RESOURCES);
        from = DEFAULT_RESOURCES;
    }

    FsCrawlerUtil.copyDirs(from, currentTestResourceDir);

    staticLogger.debug("  --> Test resources ready in [{}]", currentTestResourceDir);
}

From source file: com.upplication.s3fs.util.AmazonS3ClientMock.java

public void addFile(Path bucket, String fileName, byte[] content, FileAttribute<?>... attrs)
        throws IOException {
    if (fileName.endsWith("/"))
        fileName = fileName.substring(0, fileName.length() - 1); // strip a trailing slash
    Path file = Files.createFile(bucket.resolve(fileName.replaceAll("/", "%2F")), attrs);
    try (OutputStream outputStream = Files.newOutputStream(file)) {
        outputStream.write(content);
    }
}

From source file: au.org.ands.vocabs.toolkit.provider.transform.PropertyRewriterTransformProvider.java

@Override
public final boolean transform(final TaskInfo taskInfo, final JsonNode subtask,
        final HashMap<String, String> results) {
    // Prepare for rewriting.
    if (!loadRewriteMap()) {
        results.put(TaskStatus.ERROR, "PropertyRewriter unable to load rewrite map");
        return false;
    }

    Path originalHarvestdir = Paths.get(ToolkitFileUtils.getTaskHarvestOutputPath(taskInfo));
    // Use this transform name and the task ID to construct
    // the path names.
    String transformName = "PropertyRewriter_" + taskInfo.getTask().getId();
    String transformOutputDir = ToolkitFileUtils.getTaskTransformTemporaryOutputPath(taskInfo, transformName);
    Path transformOutputDirPath = Paths.get(transformOutputDir);

    try {
        ToolkitFileUtils.requireEmptyDirectory(transformOutputDir);
    } catch (IOException ex) {
        results.put(TaskStatus.EXCEPTION,
                "Exception in PropertyRewriter while cleaning old " + "transform output directory");
        logger.error("Exception in PropertyRewriter while cleaning old " + "transform output directory: ", ex);
        return false;
    }

    // Open the harvest directory ...
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(originalHarvestdir)) {
        // ... and iterate over every file in the harvest directory.
        for (Path entry : stream) {
            // First, parse the file into a model and do rewriting.
            Model model = new LinkedHashModel();
            RDFFormat format = Rio.getParserFormatForFileName(entry.toString());
            RDFParser rdfParser = Rio.createParser(format);
            ConceptHandler conceptHandler = new ConceptHandler(metadataRewriteConf, model);
            rdfParser.setRDFHandler(conceptHandler);
            // Use try-with-resources so the streams are closed even if parsing or writing fails.
            try (FileInputStream is = new FileInputStream(entry.toString())) {
                logger.debug("Reading RDF:" + entry.toString());
                rdfParser.parse(is, entry.toString());
            }
            // And now serialize the result.
            String resultFileName = transformOutputDirPath.resolve(entry.getFileName()).toString();
            try (FileOutputStream out = new FileOutputStream(resultFileName)) {
                // Write in the same format we read.
                Rio.write(model, out, format);
            }
        }
    } catch (DirectoryIteratorException | IOException | RDFParseException | RDFHandlerException
            | UnsupportedRDFormatException ex) {
        results.put(TaskStatus.EXCEPTION, "Exception in PropertyRewriter while Parsing RDF");
        logger.error("Exception in PropertyRewriter while Parsing RDF:", ex);
        return false;
    }

    // Done rewriting, and was successful. Replace the old
    // harvest with the transformed files.
    if (!ToolkitFileUtils.renameTransformTemporaryOutputPath(taskInfo, transformName)) {
        results.put(TaskStatus.ERROR, "Error in PropertyRewriter when renaming output " + "directory");
        logger.error("Error in PropertyRewriter when renaming output " + "directory");
        return false;
    }

    return true;
}

From source file: org.apache.solr.SolrTestCaseJ4.java

public static void writeCoreProperties(Path coreDirectory, Properties properties, String testname)
        throws IOException {
    log.info("Writing core.properties file to {}", coreDirectory);
    Files.createDirectories(coreDirectory);
    try (Writer writer = new OutputStreamWriter(
            Files.newOutputStream(coreDirectory.resolve(CORE_PROPERTIES_FILENAME)), Charset.forName("UTF-8"))) {
        properties.store(writer, testname);
    }
}

From source file: com.arpnetworking.test.junitbenchmarks.JsonBenchmarkConsumer.java

/**
 * {@inheritDoc}
 */
@Override
public void close() {
    if (!_closed) {
        try {
            // Create the output path
            final Path outputDirectory = _path.getParent();
            final String nameWithoutExtension = com.google.common.io.Files
                    .getNameWithoutExtension(_path.toString());
            ensurePathExists();

            // Merge the results
            final List<AugmentedResult> augmentedResults = Lists.newArrayListWithExpectedSize(
                    _resultsWithoutProfileData.size() + _resultsWithProfileData.size());

            // Simply wrap the results without profile data
            augmentedResults.addAll(
                    _resultsWithoutProfileData.stream().map(AugmentedResult::new).collect(Collectors.toList()));

            // For results with profile data extract the data and pair it with the result
            for (final Map.Entry<Integer, Result> entry : _resultsWithProfileData.entrySet()) {
                final int index = entry.getKey();
                final Result result = entry.getValue();

                final Optional<Path> profileDataFile = getProfileFile();
                if (profileDataFile.isPresent()) {
                    LOGGER.info(
                            String.format("Filtering profile for %s.%s in %s at %d", result.getTestClassName(),
                                    result.getTestMethodName(), profileDataFile.get(), index));

                    final Path extractedProfileDataFile = outputDirectory
                            .resolve(nameWithoutExtension + "." + result.getTestMethodName() + ".hprof");
                    new HProfFilter(profileDataFile.get(), Optional.of(extractedProfileDataFile),
                            Optional.of(index)).run();

                    augmentedResults.add(new AugmentedResult(result, extractedProfileDataFile));
                } else {
                    LOGGER.warn("Profile data file lost between accept and close");
                    augmentedResults.add(new AugmentedResult(result));
                }
            }

            // Output the test performance results
            LOGGER.info(String.format("Closing; file=%s", _path));
            final FileOutputStream outputStream = new FileOutputStream(_path.toString(), _append);
            try {
                OBJECT_MAPPER.writeValue(outputStream, augmentedResults);
                outputStream.write("\n".getBytes(StandardCharsets.UTF_8));
            } finally {
                IOUtils.closeQuietly(outputStream);
            }
        } catch (final IOException e) {
            LOGGER.error("Could not write json performance file", e);
        }
        _closed = true;
    }
}

From source file: com.facebook.buck.util.unarchive.Untar.java

@VisibleForTesting
ImmutableSet<Path> extractArchive(Path archiveFile, ProjectFilesystem filesystem, Path filesystemRelativePath,
        Optional<Path> stripPath, ExistingFileMode existingFileMode, PatternsMatcher entriesToExclude,
        boolean writeSymlinksAfterCreatingFiles) throws IOException {

    ImmutableSet.Builder<Path> paths = ImmutableSet.builder();
    HashSet<Path> dirsToTidy = new HashSet<>();
    TreeMap<Path, Long> dirCreationTimes = new TreeMap<>();
    DirectoryCreator creator = new DirectoryCreator(filesystem);

    // On windows, we create hard links instead of symlinks. This is fine, but the
    // destination file may not exist yet, which is an error. So, just hold onto the paths until
    // all files are extracted, and /then/ try to do the links
    Map<Path, Path> windowsSymlinkMap = new HashMap<>();

    try (TarArchiveInputStream archiveStream = getArchiveInputStream(archiveFile)) {
        TarArchiveEntry entry;
        while ((entry = archiveStream.getNextTarEntry()) != null) {
            String entryName = entry.getName();
            if (entriesToExclude.matchesAny(entryName)) {
                continue;
            }
            Path destFile = Paths.get(entryName);
            Path destPath;
            if (stripPath.isPresent()) {
                if (!destFile.startsWith(stripPath.get())) {
                    continue;
                }
                destPath = filesystemRelativePath.resolve(stripPath.get().relativize(destFile)).normalize();
            } else {
                destPath = filesystemRelativePath.resolve(destFile).normalize();
            }

            if (entry.isDirectory()) {
                dirsToTidy.add(destPath);
                mkdirs(creator, destPath);
                dirCreationTimes.put(destPath, entry.getModTime().getTime());
            } else if (entry.isSymbolicLink()) {
                if (writeSymlinksAfterCreatingFiles) {
                    recordSymbolicLinkForWindows(creator, destPath, entry, windowsSymlinkMap);
                } else {
                    writeSymbolicLink(creator, destPath, entry);
                }
                paths.add(destPath);
                setAttributes(filesystem, destPath, entry);
            } else if (entry.isFile()) {
                writeFile(creator, archiveStream, destPath);
                paths.add(destPath);
                setAttributes(filesystem, destPath, entry);
            }
        }

        writeWindowsSymlinks(creator, windowsSymlinkMap);
    } catch (CompressorException e) {
        throw new IOException(String.format("Could not get decompressor for archive at %s", archiveFile), e);
    }

    setDirectoryModificationTimes(filesystem, dirCreationTimes);

    ImmutableSet<Path> filePaths = paths.build();
    if (existingFileMode == ExistingFileMode.OVERWRITE_AND_CLEAN_DIRECTORIES) {
        // Clean out directories of files that were not in the archive
        tidyDirectories(filesystem, dirsToTidy, filePaths);
    }
    return filePaths;
}