Example usage for java.nio.file Path toUri

List of usage examples for java.nio.file Path toUri

Introduction

On this page you can find example usage for java.nio.file Path toUri.

Prototype

URI toUri();

Source Link

Document

Returns a URI to represent this path.

Usage

From source file:com.netflix.genie.web.services.loadbalancers.script.ScriptLoadBalancer.java

/**
 * Refresh the cached load balancing script.
 * <p>
 * Reads the script timeout, source location and destination directory from the environment, downloads the
 * script through the file transfer service, compiles it with a {@link Compilable} script engine and stores
 * the compiled script for later evaluation. On any failure the balancer is flagged as not configured.
 * A timer metric tagged with the outcome (OK/FAILED) is recorded in all cases.
 */
public void refresh() {
    log.debug("Refreshing");
    final long updateStart = System.nanoTime();
    final Set<Tag> tags = Sets.newHashSet();
    try {
        this.isUpdating.set(true);

        // Update the script timeout (falls back to the default when the property is absent)
        this.timeoutLength.set(this.environment.getProperty(ScriptLoadBalancerProperties.TIMEOUT_PROPERTY,
                Long.class, DEFAULT_TIMEOUT_LENGTH));

        // Where to download the script from; must be a valid URI
        final String scriptFileSourceValue = this.environment
                .getProperty(ScriptLoadBalancerProperties.SCRIPT_FILE_SOURCE_PROPERTY);
        if (StringUtils.isBlank(scriptFileSourceValue)) {
            throw new IllegalStateException("Invalid empty value for script source file property: "
                    + ScriptLoadBalancerProperties.SCRIPT_FILE_SOURCE_PROPERTY);
        }
        // Round-trip through URI to validate the value (throws URISyntaxException if malformed)
        final String scriptFileSource = new URI(scriptFileSourceValue).toString();

        // Local directory the script is cached in; must be a valid URI as well
        final String scriptFileDestinationValue = this.environment
                .getProperty(ScriptLoadBalancerProperties.SCRIPT_FILE_DESTINATION_PROPERTY);
        if (StringUtils.isBlank(scriptFileDestinationValue)) {
            throw new IllegalStateException("Invalid empty value for script destination directory property: "
                    + ScriptLoadBalancerProperties.SCRIPT_FILE_DESTINATION_PROPERTY);
        }
        final Path scriptDestinationDirectory = Paths.get(new URI(scriptFileDestinationValue));

        // Check the validity of the destination directory, creating it if missing
        if (!Files.exists(scriptDestinationDirectory)) {
            Files.createDirectories(scriptDestinationDirectory);
        } else if (!Files.isDirectory(scriptDestinationDirectory)) {
            throw new IllegalStateException("The script destination directory " + scriptDestinationDirectory
                    + " exists but is not a directory");
        }

        // Derive the file name (and later its extension) from the source URI
        final String fileName = StringUtils.substringAfterLast(scriptFileSource, SLASH);
        if (StringUtils.isBlank(fileName)) {
            throw new IllegalStateException("No file name found from " + scriptFileSource);
        }

        // The extension selects which script engine is used below
        final String scriptExtension = StringUtils.substringAfterLast(fileName, PERIOD);
        if (StringUtils.isBlank(scriptExtension)) {
            throw new IllegalStateException("No file extension available in " + fileName);
        }

        final Path scriptDestinationPath = scriptDestinationDirectory.resolve(fileName);

        // Download and cache the file (if it's not already there)
        this.fileTransferService.getFile(scriptFileSource, scriptDestinationPath.toUri().toString());

        final ScriptEngine engine = this.scriptEngineManager.getEngineByExtension(scriptExtension);
        // We want a compilable engine so we can cache the script
        if (!(engine instanceof Compilable)) {
            throw new IllegalArgumentException("Script engine must be of type " + Compilable.class.getName());
        }
        final Compilable compilable = (Compilable) engine;
        try (InputStream fis = Files.newInputStream(scriptDestinationPath);
                InputStreamReader reader = new InputStreamReader(fis, UTF_8)) {
            log.debug("Compiling {}", scriptFileSource);
            this.script.set(compilable.compile(reader));
        }

        tags.add(Tag.of(MetricsConstants.TagKeys.STATUS, STATUS_TAG_OK));

        this.isConfigured.set(true);
    } catch (final GenieException | IOException | ScriptException | RuntimeException | URISyntaxException e) {
        // Any failure leaves the balancer unconfigured; record the exception class as a metric tag
        tags.add(Tag.of(MetricsConstants.TagKeys.STATUS, STATUS_TAG_FAILED));
        tags.add(Tag.of(MetricsConstants.TagKeys.EXCEPTION_CLASS, e.getClass().getName()));
        log.error("Refreshing the load balancing script for ScriptLoadBalancer failed due to {}",
                e.getMessage(), e);
        this.isConfigured.set(false);
    } finally {
        // Always clear the updating flag and record how long the refresh took
        this.isUpdating.set(false);
        this.registry.timer(UPDATE_TIMER_NAME, tags).record(System.nanoTime() - updateStart,
                TimeUnit.NANOSECONDS);
        log.debug("Refresh completed");
    }
}

From source file:com.marklogic.entityservices.e2e.ExamplesBase.java

/**
 * Recursively walks {@code directory}, adding every regular file to the batcher as a document
 * whose URI is the file's {@code file:} URI. Subdirectories are descended into recursively.
 *
 * @param directory  directory to import from
 * @param batcher    batcher the documents are added to
 * @param collection optional collection name; when non-null, collection and permission metadata is attached
 * @param format     format assigned to each file handle
 */
private void importOrDescend(Path directory, WriteHostBatcher batcher, String collection, Format format) {
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(directory)) {
        for (Path entry : stream) {
            // Prefer the NIO check over entry.toFile().isDirectory()
            if (Files.isDirectory(entry)) {
                logger.info("Reading subdirectory " + entry.getFileName().toString());
                importOrDescend(entry, batcher, collection, format);
            } else {
                logger.debug("Adding " + entry.getFileName().toString());
                String uri = entry.toUri().toString();
                if (collection != null) {
                    DocumentMetadataHandle metadata = new DocumentMetadataHandle().withCollections(collection) //
                            .withPermission("nwind-reader", Capability.READ) //
                            .withPermission("nwind-writer", Capability.INSERT, Capability.UPDATE);
                    batcher.add(uri, metadata, new FileHandle(entry.toFile()).withFormat(format));
                } else {
                    batcher.add(uri, new FileHandle(entry.toFile()).withFormat(format));
                }
                logger.debug("Inserted " + format.toString() + " document " + uri);
            }
        }

    } catch (IOException e) {
        // Route the failure through the class logger rather than printStackTrace so it
        // reaches the application log with context.
        logger.error("Failed to read directory " + directory, e);
    }
}

From source file:org.dataconservancy.packaging.impl.PackageFileAnalyzer.java

/**
 * Opens the given package file, locates its resource map (REM) via the bag-info tag file, and
 * returns the root LDP container resources described by the map.
 * <p>
 * Binary resources that are the object of an iana:describes statement but not contained by any
 * LDP container are also included.
 *
 * @param pkg the package file to open
 * @return the root packaged resources found in the package's resource map
 */
@Override
public Collection<PackagedResource> getContainerRoots(final File pkg) {
    final Map<URI, PackagedResource> packageContainerResources = new HashMap<>();
    final List<URI> visitedChildContainers = new ArrayList<>();
    try {
        extractedPackageLocation = packageService.openPackage(extractDir, pkg);

        // Read bag info file to get ore-rem file; close the stream when done (was leaked before)
        final File bagInfoFile = new File(extractedPackageLocation, BAG_INFO_NAME);
        final String remURI;
        try (FileInputStream bagInfoStream = new FileInputStream(bagInfoFile)) {
            remURI = getTag(bagInfoStream, REM_KEY);
        }

        try {
            if (UriUtility.isBagUri(new URI(remURI))) {
                final Path remPath = resolveBagUri(extractDir.toPath(), new URI(remURI));

                final Model remModel = ModelFactory.createDefaultModel();
                remModel.read(remPath.toUri().toString(), getJenaFormatString(remPath));

                final ResIterator nodeIterator = remModel.listResourcesWithProperty(TYPE,
                        remModel.getResource(LDP_CONTAINER));
                if (!nodeIterator.hasNext()) {
                    throw new RuntimeException("Couldn't find any LDP Containers in the package.");
                } else {

                    while (nodeIterator.hasNext()) {
                        final Resource containerResource = nodeIterator.next();
                        // Skip containers already reached as children of another container
                        if (!visitedChildContainers.contains(new URI(containerResource.getURI()))) {
                            final PackagedResource newContainer = populateLdpContainerResource(remModel,
                                    containerResource, visitedChildContainers, extractDir.toPath());
                            packageContainerResources.put(newContainer.getURI(), newContainer);
                        }
                    }
                }

                // process any remaining binary resources:
                // these are resources that are the object of iana:describes, and are not the object of an
                // ldp:contains.
                remModel.listStatements(null, DESCRIBES_PROPERTY, (String) null)
                        .filterDrop(statement -> remModel.contains(null, LDP_CONTAINS, statement.getObject()))
                        .forEachRemaining(statement -> {
                            try {
                                final PackagedResource binaryResource = populateFileResource(
                                        statement.getObject().asResource(), extractDir.toPath(), remModel);
                                packageContainerResources.put(binaryResource.getURI(), binaryResource);
                            } catch (URISyntaxException | IOException e) {
                                throw new RuntimeException(
                                        "Error processing non-container binary resources: " + e.getMessage(),
                                        e);
                            }
                        });

            }
        } catch (URISyntaxException | IOException e) {
            // Preserve the cause (previously dropped) so callers can see the underlying failure
            throw new RuntimeException(
                    "An error occurred reading the package Resource map. " + e.getMessage(), e);
        }
        // Read through the REM File to get the ldp concepts to populate the ldpresources
    } catch (final IOException e) {
        throw new RuntimeIOException(
                "Failed to open that package to retrieve the bag-info file. " + e.getMessage(), e);
    }

    // Should be only one container since we only support one root, if we have more than one it was added before
    // it's parent so loop through the visited children and remove them here.
    if (packageContainerResources.size() > 1) {
        visitedChildContainers.forEach(packageContainerResources::remove);
    }

    return packageContainerResources.values();
}

From source file:org.jw.basex.js.rhino.Js.java

/**
 * Returns the shared {@code require} function, installing it on first use.
 * <p>
 * On the first call, each supplied source path is converted to a file URI and registered as a
 * module search path before Rhino's require support is installed into the scope. Subsequent
 * calls return the cached function.
 *
 * @param sourcePaths sequence of directory paths to search for modules
 * @return the {@code require} function wrapped as a query value
 * @throws QueryException if installing the require function fails
 */
public Value require(Value sourcePaths) throws QueryException {
    Context cx = Context.enter();
    try {
        if (require == null) {
            // Translate each source path into a file URI string for the module loader.
            List<String> modulePaths = new ArrayList<String>((int) sourcePaths.size());
            for (Value sourcePath : sourcePaths) {
                Path localPath = Paths.get((String) sourcePath.toJava());
                modulePaths.add(localPath.toUri().toString());
            }
            Require installed = scope.installRequire(cx, modulePaths, false);
            require = new JsObject(cx, installed, scope, queryContext);
        }
        return require;
    } catch (Exception e) {
        throw new QueryException(e);
    } finally {
        // Always release the Rhino context entered above.
        Context.exit();
    }
}

From source file:io.promagent.internal.HookMetadataParser.java

/**
 * Opens the named resource from the first hook JAR (or classes directory) that contains it.
 *
 * @param name resource path, expected to start with '/'
 * @return an open stream for the resource; the caller is responsible for closing it
 * @throws IOException           if a hook location is neither a regular file nor a directory
 * @throws FileNotFoundException if no hook JAR or directory contains the resource
 */
private InputStream getResourceAsStream(String name) throws IOException {
    for (Path hookJar : hookJars) {
        try {
            URL url;
            // For convenient testing, hookJar may be a classes/ directory instead of a JAR file.
            if (Files.isDirectory(hookJar)) {
                url = hookJar.toUri().resolve("." + name).toURL();
            } else if (Files.isRegularFile(hookJar)) {
                url = new URL("jar:" + hookJar.toUri().toURL().toString() + "!" + name);
            } else {
                throw new IOException("Invalid JAR file or classes directory: " + hookJar);
            }
            return url.openStream();
        } catch (FileNotFoundException ignored) {
            // Resource not present in this location: deliberately fall through to the next hook JAR.
        }
    }
    // reduce without an identity avoids the leading ", " the previous version produced
    throw new FileNotFoundException(name + " not found in ["
            + hookJars.stream().map(Path::toString).reduce((s1, s2) -> s1 + ", " + s2).orElse("") + "]");
}

From source file:org.apache.coheigea.bigdata.knox.ranger.KnoxRangerTest.java

// Invokes the Storm UI cluster-configuration endpoint through the gateway with basic auth and
// verifies the response status matches the expected code. The mock Storm server is primed to
// serve the JSON fixture from src/test/resources.
private void makeStormUIInvocation(int statusCode, String user, String password) throws IOException {
    // Resolve the test resource relative to the build's base directory (fallback: CWD).
    String basedir = System.getProperty("basedir");
    if (basedir == null) {
        basedir = new File(".").getCanonicalPath();
    }
    Path path = FileSystems.getDefault().getPath(basedir, "/src/test/resources/cluster-configuration.json");

    // Prime the mock Storm server to answer the GET with the fixture file's bytes.
    stormServer.expect().method("GET").pathInfo("/api/v1/cluster/configuration").respond()
            .status(HttpStatus.SC_OK).content(IOUtils.toByteArray(path.toUri()))
            .contentType("application/json");

    // Call the endpoint through the gateway as the given user and assert the expected status.
    given().auth().preemptive().basic(user, password).header("X-XSRF-Header", "jksdhfkhdsf")
            .header("Accept", "application/json").when()
            .get("http://localhost:" + gateway.getAddresses()[0].getPort() + "/gateway/cluster/storm"
                    + "/api/v1/cluster/configuration")
            .then().log().all().statusCode(statusCode);

}

From source file:org.opencb.opencga.storage.core.variant.annotation.DefaultVariantAnnotationManager.java

/**
 * Creates a variant annotation file from an specific source based on the content of a Variant DataBase.
 *
 * @param outDir   File outdir./*from  w  w w  .j a v a 2s  .  com*/
 * @param fileName Generated file name.
 * @param query    Query for those variants to annotate.
 * @param params   Specific params.
 * @return URI of the generated file.
 * @throws VariantAnnotatorException IOException thrown
 */
/**
 * Creates a variant annotation file from a specific source based on the content of a Variant DataBase.
 * <p>
 * Variants matching the query are read from the database, annotated in parallel batches, and written
 * to an Avro or JSON file (optionally gzipped) in the output directory.
 *
 * @param outDir   Output directory; when null the file is written under /tmp.
 * @param fileName Generated file name (extension is appended based on params).
 * @param query    Query for those variants to annotate.
 * @param params   Specific params: "gzip" (default true), "annotation.file.avro" (default false),
 *                 batch size and number of threads.
 * @return URI of the generated file.
 * @throws VariantAnnotatorException if the annotation pipeline fails
 */
public URI createAnnotation(Path outDir, String fileName, Query query, ObjectMap params)
        throws VariantAnnotatorException {

    // Output format and compression are controlled by params; defaults: json + gzip.
    boolean gzip = params == null || params.getBoolean("gzip", true);
    boolean avro = params == null || params.getBoolean("annotation.file.avro", false);
    Path path = Paths.get(outDir != null ? outDir.toString() : "/tmp",
            fileName + ".annot" + (avro ? ".avro" : ".json") + (gzip ? ".gz" : ""));
    URI fileUri = path.toUri();

    /** Getting iterator from OpenCGA Variant database. **/
    QueryOptions iteratorQueryOptions;
    if (params == null) {
        iteratorQueryOptions = new QueryOptions();
    } else {
        iteratorQueryOptions = new QueryOptions(params);
    }
    // Only the fields needed for annotation are fetched.
    List<String> include = Arrays.asList("chromosome", "start", "end", "alternate", "reference");
    iteratorQueryOptions.add("include", include);

    int batchSize = 200;
    int numThreads = 8;
    if (params != null) { //Parse query options
        batchSize = params.getInt(BATCH_SIZE, batchSize);
        numThreads = params.getInt(NUM_THREADS, numThreads);
    }

    try {
        DataReader<Variant> variantDataReader = new VariantDBReader(dbAdaptor, query, iteratorQueryOptions);

        ProgressLogger progressLogger = new ProgressLogger("Annotated variants:",
                () -> dbAdaptor.count(query).first(), 200);
        // Each task annotates one batch of variants and reports progress.
        ParallelTaskRunner.TaskWithException<Variant, VariantAnnotation, VariantAnnotatorException> annotationTask = variantList -> {
            List<VariantAnnotation> variantAnnotationList;
            long start = System.currentTimeMillis();
            logger.debug("Annotating batch of {} genomic variants.", variantList.size());
            variantAnnotationList = variantAnnotator.annotate(variantList);
            progressLogger.increment(variantList.size(),
                    () -> ", up to position " + variantList.get(variantList.size() - 1).toString());

            logger.debug("Annotated batch of {} genomic variants. Time: {}s", variantList.size(),
                    (System.currentTimeMillis() - start) / 1000.0);
            return variantAnnotationList;
        };

        // Writer selection mirrors the file extension chosen above.
        final DataWriter<VariantAnnotation> variantAnnotationDataWriter;
        if (avro) {
            variantAnnotationDataWriter = new AvroDataWriter<>(path, gzip, VariantAnnotation.getClassSchema());
        } else {
            variantAnnotationDataWriter = new VariantAnnotationJsonDataWriter(path, gzip);
        }

        // Unsorted parallel pipeline: reader -> annotation tasks -> writer; abort on first failure.
        ParallelTaskRunner.Config config = ParallelTaskRunner.Config.builder().setNumTasks(numThreads)
                .setBatchSize(batchSize).setAbortOnFail(true).setSorted(false).build();
        ParallelTaskRunner<Variant, VariantAnnotation> parallelTaskRunner = new ParallelTaskRunner<>(
                variantDataReader, annotationTask, variantAnnotationDataWriter, config);
        parallelTaskRunner.run();
    } catch (ExecutionException e) {
        throw new VariantAnnotatorException("Error creating annotations", e);
    }

    return fileUri;
}

From source file:org.wso2.appserver.integration.tests.usermgt.UserManagementWithAdminUserTestCase.java

@Test(groups = "wso2.as", description = "Upload users in bulk")
public void testBulkUserUpload() throws Exception {
    // Locate the CSV fixture listing the users to import.
    Path filePath = Paths.get(FrameworkPathUtil.getSystemResourceLocation(), "artifacts", "AS", "usermgt",
            USER_CSV_FILE_NAME);
    DataHandler handler = new DataHandler(filePath.toUri().toURL());
    userManagementClient.bulkImportUsers(filePath.toString(), handler, "abc123");
    ArrayList<String> users = (ArrayList<String>) FileUtils.readLines(filePath.toFile());
    users.remove(0); // Drop the first CSV line (the username header row, per original intent)
    HashSet<String> userList = userManagementClient.getUserList();
    // Verify each imported user exists, then clean it up so the test is repeatable.
    for (String user : users) {
        assertTrue(userList.contains(user), "Username " + user + " doesn't exist");
        //assertNotNull(loginLogoutClient.login(user, "abc123", asServer.getInstance().getHosts().get("default")));
        userManagementClient.deleteUser(user);
    }
}

From source file:org.apache.jena.osgi.test.JenaOSGITest.java

/**
 * Round-trips a dataset through JSON-LD serialization and verifies the named graph survives.
 * JSON-LD is used deliberately because writing it exercises multiple other bundles.
 */
@Test
public void testJenaArq() throws Exception {
    // Build an in-memory dataset holding a single named graph.
    Dataset original = DatasetFactory.createMem();
    original.addNamedModel(EXAMPLE_COM_GRAPH, makeModel());

    // Serialize it to a temporary JSON-LD file that is removed on JVM exit.
    Path jsonldFile = Files.createTempFile("example", ".jsonld");
    jsonldFile.toFile().deleteOnExit();

    try (OutputStream output = Files.newOutputStream(jsonldFile)) {
        RDFDataMgr.write(output, original, Lang.JSONLD);
    }

    // Reload from the file URI and confirm the named graph is still present.
    Dataset reloaded = RDFDataMgr.loadDataset(jsonldFile.toUri().toString());
    assertTrue(reloaded.containsNamedModel(EXAMPLE_COM_GRAPH));

    runQuery(reloaded);

}

From source file:com.marklogic.entityservices.e2e.CSVLoader.java

/**
 * Loads every *.csv file under {projectDir}/data/superstore-csv, converts each row to a JSON
 * document and writes it to MarkLogic through a write batcher, then flushes the batcher.
 *
 * @throws InterruptedException if the load is interrupted
 */
public void go() throws InterruptedException {

    logger.info("job started.");

    File dir = new File(projectDir + "/data/superstore-csv");

    WriteHostBatcher batcher = moveMgr.newWriteHostBatcher().withBatchSize(100).withThreadCount(10)
            .onBatchSuccess((client, batch) -> logger.info(getSummaryReport(batch)))
            .onBatchFailure((client, batch, throwable) -> {
                // The logger already records the throwable with its stack trace; no printStackTrace needed.
                logger.warn("FAILURE on batch:" + batch.toString() + "\n", throwable);
            });

    ticket = moveMgr.startJob(batcher);

    try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir.toPath(), "*.csv")) {
        for (Path entry : stream) {
            logger.debug("Adding " + entry.getFileName().toString());

            // try-with-resources closes the iterator even if a row fails to parse (was leaked before).
            try (MappingIterator<ObjectNode> it = csvMapper.readerFor(ObjectNode.class).with(bootstrapSchema)
                    .readValues(entry.toFile())) {
                long i = 0;
                while (it.hasNext()) {
                    ObjectNode jsonNode = it.next();
                    String jsonString = mapper.writeValueAsString(jsonNode);

                    // Document URI: source file URI plus a per-row counter.
                    String uri = entry.toUri().toString() + "-" + Long.toString(i++) + ".json";
                    DocumentMetadataHandle metadata = new DocumentMetadataHandle() //
                            .withCollections("raw", "csv") //
                            .withPermission("nwind-reader", Capability.READ) //
                            .withPermission("nwind-writer", Capability.INSERT, Capability.UPDATE);
                    batcher.add(uri, metadata, new StringHandle(jsonString));
                    if (i % 1000 == 0)
                        logger.debug("Inserting JSON document " + uri);
                }
            }
        }
    } catch (IOException e) {
        // Log through the class logger instead of printStackTrace.
        logger.error("CSV load failed", e);
    }

    batcher.flush();
}