Example usage for java.nio.file Paths get

List of usage examples for java.nio.file Paths get

Introduction

This page collects usage examples for java.nio.file.Paths.get, drawn from the source files listed below.

Prototype

public static Path get(URI uri) 

Document

Converts the given URI to a Path object. Most of the examples below use the companion overload Paths.get(String first, String... more), which joins one or more path strings into a single Path.
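
Before the project examples, here is a minimal self-contained sketch of both overloads; the file name and URI are illustrative only and not taken from the examples below.

import java.net.URI;
import java.nio.file.Path;
import java.nio.file.Paths;

public class PathsGetSketch {
    public static void main(String[] args) {
        // String overload: joins the segments into a relative Path
        Path props = Paths.get("config", "application.properties");
        System.out.println(props);

        // URI overload: converts an absolute file: URI to a Path
        Path fromUri = Paths.get(URI.create("file:///tmp/example.txt"));
        System.out.println(fromUri);
    }
}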

Usage

From source file:ratpack.spark.jobserver.Main.java

public static void main(String... args) throws Exception {
    RatpackServer ratpackServer = RatpackServer.start(spec -> spec.serverConfig(builder -> {
        Path basePath = BaseDir.find("application.properties");
        LOGGER.debug("BASE DIR: {}", basePath.toString());
        builder.baseDir(BaseDir.find("application.properties")).env().sysProps();

        Path localAppProps = Paths.get("../../config/application.properties");
        if (Files.exists(localAppProps)) {
            LOGGER.debug("LOCALLY OVERLOADED application.properties: {}", localAppProps.toUri().toString());
            builder.props(localAppProps);
        } else {
            URL cpAppProps = Main.class.getClassLoader().getResource("config/application.properties");
            LOGGER.debug("CLASSPATH OVERLOADED application.properties: {}",
                    cpAppProps != null ? cpAppProps.toString() : "DEFAULT LOCATION");
            builder.props(cpAppProps != null ? cpAppProps
                    : Main.class.getClassLoader().getResource("application.properties"));
        }

        Path localSparkJobsProps = Paths.get("../../config/sparkjobs.properties");
        if (Files.exists(localSparkJobsProps)) {
            LOGGER.debug("LOCALLY OVERLOADED sparkjobs.properties: {}", localSparkJobsProps.toUri().toString());
            builder.props(localSparkJobsProps);
        } else {
            URL cpSparkJobsProps = Main.class.getClassLoader().getResource("config/sparkjobs.properties");
            LOGGER.debug("CLASSPATH OVERLOADED SPARKJOBS.PROPS: {}",
                    cpSparkJobsProps != null ? cpSparkJobsProps.toString() : "DEFAULT LOCATION");
            builder.props(cpSparkJobsProps != null ? cpSparkJobsProps
                    : Main.class.getClassLoader().getResource("sparkjobs.properties"));
        }

        builder.require("/spark", SparkConfig.class).require("/job", SparkJobsConfig.class);
    }).registry(Guice.registry(bindingsSpec -> bindingsSpec.bindInstance(ResponseTimer.decorator())
            .module(ContainersModule.class).module(SparkModule.class)
            .bindInstance(new ObjectMapper().writerWithDefaultPrettyPrinter())))
            .handlers(chain -> chain.all(ctx -> {
                LOGGER.debug("ALL");
                MDC.put("clientIP", ctx.getRequest().getRemoteAddress().getHostText());
                RequestId.Generator generator = ctx.maybeGet(RequestId.Generator.class)
                        .orElse(UuidBasedRequestIdGenerator.INSTANCE);
                RequestId requestId = generator.generate(ctx.getRequest());
                ctx.getRequest().add(RequestId.class, requestId);
                MDC.put("requestId", requestId.toString());
                ctx.next();
            }).prefix("v1", chain1 -> chain1.all(RequestLogger.ncsa()).get("api-def", ctx -> {
                LOGGER.debug("GET API_DEF.JSON");
                SparkJobsConfig config = ctx.get(SparkJobsConfig.class);
                LOGGER.debug("SPARK JOBS CONFIG: " + config.toString());
                ctx.render(ctx.file("public/apidef/apidef.json"));
            }).prefix("spark", JobsEndpoints.class))));
    LOGGER.debug("STARTED: {}://{}:{}", ratpackServer.getScheme(), ratpackServer.getBindHost(),
            ratpackServer.getBindPort());
}

From source file:listfiles.ListFiles.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    // TODO code application logic here
    BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
    String folderPath = "";
    String fileName = "DirectoryFiles.xlsx";

    try {
        System.out.println("Folder path :");
        folderPath = reader.readLine();
        //System.out.println("Output File Name :");
        //fileName = reader.readLine();

        XSSFWorkbook wb = new XSSFWorkbook();
        FileOutputStream fileOut = new FileOutputStream(folderPath + "\\" + fileName);
        XSSFSheet sheet1 = wb.createSheet("Files");
        int row = 0;
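        // Recursively walk the directory tree rooted at the Path built from the user-supplied folder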
        Stream<Path> stream = Files.walk(Paths.get(folderPath));
        Iterator<Path> pathIt = stream.iterator();
        String ext = "";

        while (pathIt.hasNext()) {
            Path filePath = pathIt.next();
            Cell cell1 = checkRowCellExists(sheet1, row, 0);
            Cell cell2 = checkRowCellExists(sheet1, row, 1);
            row++;
            ext = FilenameUtils.getExtension(filePath.getFileName().toString());
            cell1.setCellValue(filePath.getFileName().toString());
            cell2.setCellValue(ext);

        }
        sheet1.autoSizeColumn(0);
        sheet1.autoSizeColumn(1);

        wb.write(fileOut);
        fileOut.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
    System.out.println("Program Finished");
}

From source file:edu.jhu.hlt.concrete.ingesters.annotatednyt.AnnotatedNYTIngesterRunner.java

/**
 * @param args
 */
public static void main(String... args) {
    Thread.setDefaultUncaughtExceptionHandler(new LoggedUncaughtExceptionHandler());
    AnnotatedNYTIngesterRunner run = new AnnotatedNYTIngesterRunner();
    JCommander jc = new JCommander(run, args);
    jc.setProgramName(AnnotatedNYTIngesterRunner.class.getSimpleName());
    if (run.delegate.help) {
        jc.usage();
    }

    try {
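        // Build the output Path from the ingester's configured output path string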
        Path outpath = Paths.get(run.delegate.outputPath);
        IngesterParameterDelegate.prepare(outpath);

        NYTCorpusDocumentParser parser = new NYTCorpusDocumentParser();
        for (String pstr : run.delegate.paths) {
            LOGGER.debug("Running on file: {}", pstr);
            Path p = Paths.get(pstr);
            new ExistingNonDirectoryFile(p);
            int nPaths = p.getNameCount();
            Path year = p.getName(nPaths - 2);
            Path outWithExt = outpath.resolve(year.toString() + p.getFileName());

            if (Files.exists(outWithExt)) {
                if (!run.delegate.overwrite) {
                    LOGGER.info("File: {} exists and overwrite disabled. Not running.", outWithExt.toString());
                    continue;
                } else {
                    Files.delete(outWithExt);
                }
            }

            try (InputStream is = Files.newInputStream(p);
                    BufferedInputStream bin = new BufferedInputStream(is);
                    TarGzArchiveEntryByteIterator iter = new TarGzArchiveEntryByteIterator(bin);

                    OutputStream os = Files.newOutputStream(outWithExt);
                    GzipCompressorOutputStream gout = new GzipCompressorOutputStream(os);
                    TarArchiver arch = new TarArchiver(gout)) {
                Iterable<byte[]> able = () -> iter;
                StreamSupport.stream(able.spliterator(), false).map(ba -> parser.fromByteArray(ba, false))
                        .map(doc -> new AnnotatedNYTDocument(doc))
                        .map(and -> new CommunicationizableAnnotatedNYTDocument(and).toCommunication())
                        .forEach(comm -> {
                            try {
                                arch.addEntry(new ArchivableCommunication(comm));
                            } catch (IOException e) {
                                LOGGER.error("Caught exception processing file: " + pstr, e);
                            }
                        });
            }
        }
    } catch (NotFileException | IOException e) {
        LOGGER.error("Caught exception processing.", e);
    }
}

From source file:ru.histone.staticrender.StaticRender.java

public static void main(String... args) throws IOException {
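    // Paths.get(".") is the current working directory; toRealPath() makes it absolute and resolves symlinks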
    Path workDir = Paths.get(".").toRealPath();
    log.info("Working dir={}", workDir.toString());

    Path srcDir = workDir.resolve("src/site");
    Path dstDir = workDir.resolve("build/site");

    StaticRender app = new StaticRender();
    app.renderSite(srcDir, dstDir);
}

From source file:io.fabric8.vertx.maven.plugin.FileFilterMain.java

public static void main(String[] args) {

    Commandline commandline = new Commandline();
    commandline.setExecutable("java");
    commandline.createArg().setValue("io.vertx.core.Launcher");
    commandline.createArg().setValue("--redeploy=target/**/*");

    System.out.println(commandline);

    File baseDir = new File("/Users/kameshs/git/fabric8io/vertx-maven-plugin/samples/vertx-demo");
    List<String> includes = new ArrayList<>();
    includes.add("src/**/*.java");
    //FileAlterationMonitor monitor  = null;
    try {

        Set<Path> inclDirs = new HashSet<>();

        includes.forEach(s -> {
            try {

                if (s.startsWith("**")) {
                    Path rootPath = Paths.get(baseDir.toString());
                    if (Files.exists(rootPath)) {
                        File[] dirs = rootPath.toFile().listFiles((dir, name) -> dir.isDirectory());
                        Objects.requireNonNull(dirs);
                        Stream.of(dirs).forEach(f -> inclDirs.add(Paths.get(f.toString())));
                    }
                } else if (s.contains("**")) {
                    String root = s.substring(0, s.indexOf("/**"));
                    Path rootPath = Paths.get(baseDir.toString(), root);
                    if (Files.exists(rootPath)) {
                        File[] dirs = rootPath.toFile().listFiles((dir, name) -> dir.isDirectory());
                        Objects.requireNonNull(dirs);
                        Stream.of(dirs).forEach(f -> inclDirs.add(Paths.get(f.toString())));
                    }
                }

                List<Path> dirs = FileUtils.getFileAndDirectoryNames(baseDir, s, null, true, true, true, true)
                        .stream().map(FileUtils::dirname).map(Paths::get)
                        .filter(p -> Files.exists(p) && Files.isDirectory(p)).collect(Collectors.toList());

                inclDirs.addAll(dirs);

            } catch (Exception e) {
                e.printStackTrace();
            }
        });

        FileAlterationMonitor monitor = fileWatcher(inclDirs);

        Runnable monitorTask = () -> {
            try {
                monitor.start();
            } catch (Exception e) {
                e.printStackTrace();
            }
        };

        monitorTask.run();

    } catch (Exception e) {
        e.printStackTrace();
    }

}

From source file:io.reactiverse.vertx.maven.plugin.FileFilterMain.java

public static void main(String[] args) {

    Commandline commandline = new Commandline();
    commandline.setExecutable("java");
    commandline.createArg().setValue("io.vertx.core.Launcher");
    commandline.createArg().setValue("--redeploy=target/**/*");

    System.out.println(commandline);

    File baseDir = new File("/Users/kameshs/git/reactiverse/vertx-maven-plugin/samples/vertx-demo");
    List<String> includes = new ArrayList<>();
    includes.add("src/**/*.java");
    //FileAlterationMonitor monitor  = null;
    try {

        Set<Path> inclDirs = new HashSet<>();

        includes.forEach(s -> {
            try {

                if (s.startsWith("**")) {
                    Path rootPath = Paths.get(baseDir.toString());
                    if (Files.exists(rootPath)) {
                        File[] dirs = rootPath.toFile().listFiles((dir, name) -> dir.isDirectory());
                        Objects.requireNonNull(dirs);
                        Stream.of(dirs).forEach(f -> inclDirs.add(Paths.get(f.toString())));
                    }
                } else if (s.contains("**")) {
                    String root = s.substring(0, s.indexOf("/**"));
                    Path rootPath = Paths.get(baseDir.toString(), root);
                    if (Files.exists(rootPath)) {
                        File[] dirs = rootPath.toFile().listFiles((dir, name) -> dir.isDirectory());
                        Objects.requireNonNull(dirs);
                        Stream.of(dirs).forEach(f -> inclDirs.add(Paths.get(f.toString())));
                    }
                }

                List<Path> dirs = FileUtils.getFileAndDirectoryNames(baseDir, s, null, true, true, true, true)
                        .stream().map(FileUtils::dirname).map(Paths::get)
                        .filter(p -> Files.exists(p) && Files.isDirectory(p)).collect(Collectors.toList());

                inclDirs.addAll(dirs);

            } catch (Exception e) {
                e.printStackTrace();
            }
        });

        FileAlterationMonitor monitor = fileWatcher(inclDirs);

        Runnable monitorTask = () -> {
            try {
                monitor.start();
            } catch (Exception e) {
                e.printStackTrace();
            }
        };

        monitorTask.run();

    } catch (Exception e) {
        e.printStackTrace();
    }

}

From source file:com.ibm.ie.tachyon.fuse.TachyonFuse.java

public static void main(String[] args) {
    final TachyonFuseOptions opts = parseOptions(args);
    if (opts == null) {
        System.exit(1);
    }
    final TachyonFuseFs fs = new TachyonFuseFs(opts);
    final List<String> fuseOpts = opts.getFuseOpts();
    fuseOpts.add("-odirect_io");

    try {
        fs.mount(Paths.get(opts.getMountPoint()), true, opts.isDebug(), fuseOpts.toArray(new String[0]));
    } finally {
        fs.umount();

    }
}

From source file:test.jackson.JacksonNsgiDiscover.java

public static void main(String[] args) throws IOException {

    ObjectMapper objectMapper = new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);

    String ngsiRcr = new String(Files.readAllBytes(Paths.get(NGSI_FILE)));

    DiscoveryContextAvailabilityRequest dcar = objectMapper.readValue(ngsiRcr,
            DiscoveryContextAvailabilityRequest.class);

    //        System.out.println(objectMapper.writeValueAsString(dcar));
    System.out.println(dcar.getRestriction().getOperationScope().get(1).getScopeValue());

    LinkedHashMap shapeHMap = (LinkedHashMap) dcar.getRestriction().getOperationScope().get(1).getScopeValue();
    //        Association assocObject =  objectMapper.convertValue(shapeHMap, Association.class);
    //        System.out.println(assocObject.getAttributeAssociation().get(0).getSourceAttribute());

    Shape shape = objectMapper.convertValue(shapeHMap, Shape.class);
    System.out.println("Deserialized Class: " + shape.getClass().getSimpleName());
    System.out.println("VALUE: " + shape.getPolygon().getVertices().get(2).getLatitude());
    System.out.println("VALUE: " + shape.getCircle());
    if (shape.getCircle() == null)
        System.out.println("Circle is null");

    Polygon polygon = shape.getPolygon();
    int vertexSize = polygon.getVertices().size();
    Coordinate[] coords = new Coordinate[vertexSize];

    final ArrayList<Coordinate> points = new ArrayList<>();
    for (int i = 0; i < vertexSize; i++) {
        Vertex vertex = polygon.getVertices().get(i);
        points.add(new Coordinate(Double.valueOf(vertex.getLatitude()), Double.valueOf(vertex.getLongitude())));
        coords[i] = new Coordinate(Double.valueOf(vertex.getLatitude()), Double.valueOf(vertex.getLongitude()));
    }
    points.add(new Coordinate(Double.valueOf(polygon.getVertices().get(0).getLatitude()),
            Double.valueOf(polygon.getVertices().get(0).getLongitude())));

    final GeometryFactory gf = new GeometryFactory();

    final Coordinate target = new Coordinate(49, -0.6);
    final Point point = gf.createPoint(target);

    Geometry shapeGm = gf.createPolygon(
            new LinearRing(new CoordinateArraySequence(points.toArray(new Coordinate[points.size()])), gf),
            null);
    //    Geometry shapeGm = gf.createPolygon(coords);    
    System.out.println(point.within(shapeGm));

    //        System.out.println(rcr.getContextRegistration().get(0).getContextMetadata().get(0).getValue().getClass().getCanonicalName());

    //        String assocJson = objectMapper.writeValueAsString(association);
    //        Value assocObject =  objectMapper.readValue(objectMapper.writeValueAsString(association), Value.class);
    //        System.out.println(association.values().toString());
    //        System.out.println(assocJson);

}

From source file:de.qaware.chronix.spark.api.java.ExternalizeTestData.java

/**
 * @param args optional first argument: file to serialize to. A default file name is provided.
 * @throws SolrServerException
 * @throws FileNotFoundException
 */
public static void main(String[] args) throws SolrServerException, IOException {

    ChronixSparkLoader chronixSparkLoader = new ChronixSparkLoader();
    ChronixYAMLConfiguration config = chronixSparkLoader.getConfig();

    String file = (args.length >= 1) ? args[0] : config.getTestdataFile();

    Path filePath = Paths.get(file);
    Files.deleteIfExists(filePath);
    Output output = new Output(new DeflaterOutputStream(new FileOutputStream(filePath.toString())));
    System.out.println("Opening test data file: " + filePath.toString());

    ChronixSparkContext cSparkContext = null;

    //Create target file
    try {
        //Create Chronix Spark context
        cSparkContext = chronixSparkLoader.createChronixSparkContext();

        //Read data into ChronixRDD
        SolrQuery query = new SolrQuery(config.getSolrReferenceQuery());
        ChronixRDD rdd = cSparkContext.queryChronixChunks(query, config.getZookeeperHost(),
                config.getChronixCollection(), config.getStorage());

        System.out.println("Writing " + rdd.count() + " time series into test data file.");

        //Loop through result and serialize it to disk
        Kryo kryo = new Kryo();
        List<MetricTimeSeries> mtsList = IteratorUtils.toList(rdd.iterator());
        System.out.println("Writing objects...");
        kryo.writeObject(output, mtsList);
        output.flush();
        System.out.println("Objects written.");
    } finally {
        output.close();
        if (cSparkContext != null) {
            cSparkContext.getSparkContext().close();
        }
        System.out.println("Test data file written successfully!");
    }
}

From source file:grakn.core.server.Grakn.java

public static void main(String[] args) {
    Thread.setDefaultUncaughtExceptionHandler(
            (Thread t, Throwable e) -> LOG.error(ErrorMessage.UNCAUGHT_EXCEPTION.getMessage(t.getName()), e));

    try {
        String graknPidFileProperty = Optional.ofNullable(SystemProperty.GRAKN_PID_FILE.value()).orElseThrow(
                () -> new RuntimeException(ErrorMessage.GRAKN_PIDFILE_SYSTEM_PROPERTY_UNDEFINED.getMessage()));

        Path pidfile = Paths.get(graknPidFileProperty);
        PIDManager pidManager = new PIDManager(pidfile);
        pidManager.trackGraknPid();

        // Start Server with timer
        Stopwatch timer = Stopwatch.createStarted();
        boolean benchmark = parseBenchmarkArg(args);
        Server server = ServerFactory.createServer(benchmark);
        server.start();

        LOG.info("Grakn started in {}", timer.stop());
    } catch (RuntimeException | IOException e) {
        LOG.error(ErrorMessage.UNCAUGHT_EXCEPTION.getMessage(e.getMessage()), e);
        System.err.println(ErrorMessage.UNCAUGHT_EXCEPTION.getMessage(e.getMessage()));
    }
}