Example usage for java.nio.file StandardOpenOption CREATE_NEW

List of usage examples for java.nio.file StandardOpenOption CREATE_NEW

Introduction

In this page you can find the example usage for java.nio.file StandardOpenOption CREATE_NEW.

Prototype

StandardOpenOption CREATE_NEW

To view the source code for java.nio.file StandardOpenOption CREATE_NEW, click the Source Link below.

Document

Create a new file, failing if the file already exists.

Usage

From source file:org.eclipse.jgit.lfs.server.fs.LfsServerTest.java

/**
 * Creates a file with pseudo-random content by repeatedly writing the same
 * random 4 KiB block until the file has at least the specified length.
 * A requested size of zero (or a negative value) produces an empty file.
 *
 * @param f
 *            file to fill; must not already exist (opened with CREATE_NEW)
 * @param size
 *            minimum size of the file to generate, in bytes
 * @return actual length of the generated file in bytes (may exceed
 *         {@code size} by up to 4095 bytes)
 * @throws IOException
 *             if the file already exists or cannot be written
 */
protected long createPseudoRandomContentFile(Path f, long size) throws IOException {
    SecureRandom rnd = new SecureRandom();
    byte[] buf = new byte[4096];
    rnd.nextBytes(buf);
    ByteBuffer bytebuf = ByteBuffer.wrap(buf);
    try (FileChannel outChannel = FileChannel.open(f, StandardOpenOption.CREATE_NEW,
            StandardOpenOption.WRITE)) {
        long len = 0;
        // FIX: while-loop instead of do-while, so that size <= 0 yields an
        // empty file rather than an unconditional first 4 KiB block.
        while (len < size) {
            len += outChannel.write(bytebuf);
            // Buffer fully drained: rewind so the same 4 KiB is written again.
            // After a partial write the next iteration resumes at position().
            if (bytebuf.position() == 4096) {
                bytebuf.rewind();
            }
        }
    }
    return Files.size(f);
}

From source file:fr.gael.dhus.sync.impl.ODataProductSynchronizer.java

/**
 * Uses the given `http_client` to download `url` into `out_tmp`.
 * Renames `out_tmp` to the value of the filename param of the Content-Disposition header field.
 * Returns a path to the renamed file./*from   www.  j a v  a2  s  .  c om*/
 *
 * @param http_client synchronous interruptible HTTP client.
 * @param out_tmp download destination file on disk (will be created if does not exist).
 * @param url what to download.
 * @return Path to file with its actual name.
 * @throws IOException Anything went wrong (with IO or network, or if the HTTP header field
 *       Content-Disposition is missing).
 * @throws InterruptedException Thread has been interrupted.
 */
private DownloadResult downloadValidateRename(InterruptibleHttpClient http_client, Path out_tmp, String url)
        throws IOException, InterruptedException {
    try (FileChannel output = FileChannel.open(out_tmp, StandardOpenOption.CREATE_NEW,
            StandardOpenOption.WRITE)) {

        HttpResponse response = http_client.interruptibleGet(url, output);

        // If the response's status code is not 200, something wrong happened
        if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) {
            Formatter ff = new Formatter();
            ff.format(
                    "Synchronizer#%d cannot download product at %s,"
                            + " remote dhus returned message '%s' (HTTP%d)",
                    getId(), url, response.getStatusLine().getReasonPhrase(),
                    response.getStatusLine().getStatusCode());
            throw new IOException(ff.out().toString());
        }

        // Gets the filename from the HTTP header field `Content-Disposition'
        Pattern pat = Pattern.compile("filename=\"(.+?)\"", Pattern.CASE_INSENSITIVE);
        String contdis = response.getFirstHeader("Content-Disposition").getValue();
        Matcher m = pat.matcher(contdis);
        if (!m.find()) {
            throw new IOException("Synchronizer#" + getId()
                    + " Missing HTTP header field `Content-Disposition` that determines the filename");
        }
        String filename = m.group(1);
        if (filename == null || filename.isEmpty()) {
            throw new IOException(
                    "Synchronizer#" + getId() + " Invalid filename in HTTP header field `Content-Disposition`");
        }

        // Renames the downloaded file
        output.close();
        Path dest = out_tmp.getParent().resolve(filename);
        Files.move(out_tmp, dest, StandardCopyOption.ATOMIC_MOVE);

        DownloadResult res = new DownloadResult(dest, response.getEntity().getContentType().getValue(),
                response.getEntity().getContentLength());

        return res;
    } finally {
        if (Files.exists(out_tmp)) {
            Files.delete(out_tmp);
        }
    }
}

From source file:de.tiqsolutions.hdfs.HadoopFileSystemProvider.java

/**
 * Copies {@code source} to {@code target}, honouring REPLACE_EXISTING and
 * COPY_ATTRIBUTES plus the Hadoop-specific REMOTE_COPY option.
 *
 * <p>Without REPLACE_EXISTING an existing target triggers a
 * {@link java.nio.file.FileAlreadyExistsException}; with it, the target is
 * deleted first. When REMOTE_COPY is requested and both paths live on the same
 * file system, the copy is delegated to {@code remoteCopy}; otherwise the data
 * is streamed channel-to-channel.
 *
 * @param source  path to copy from.
 * @param target  path to copy to.
 * @param options standard and Hadoop copy options.
 * @throws IOException if the target exists (without REPLACE_EXISTING) or any
 *                     read/write fails.
 */
@Override
public void copy(Path source, Path target, CopyOption... options) throws IOException {
    List<CopyOption> optionList = Arrays.asList(options);
    if (!optionList.contains(StandardCopyOption.REPLACE_EXISTING)) {
        if (Files.exists(target))
            throw new java.nio.file.FileAlreadyExistsException(source.toString(), target.toString(),
                    "could not copy file to destination");
    } else {
        Files.deleteIfExists(target);
    }

    FileSystem sourceFS = source.getFileSystem();
    FileSystem targetFS = target.getFileSystem();

    // Same file system and remote copy requested: let the cluster do the work.
    if (optionList.contains(HadoopCopyOption.REMOTE_COPY) && sourceFS.equals(targetFS)) {
        remoteCopy(source, target, options);
        return;
    }

    try (SeekableByteChannel sourceChannel = sourceFS.provider().newByteChannel(source,
            EnumSet.of(StandardOpenOption.READ))) {

        Set<StandardOpenOption> openOptions = EnumSet.of(StandardOpenOption.WRITE);
        if (optionList.contains(StandardCopyOption.REPLACE_EXISTING))
            openOptions.add(StandardOpenOption.CREATE);
        else
            openOptions.add(StandardOpenOption.CREATE_NEW);

        // Optionally carry POSIX permissions and HDFS block size / replication
        // over to the target, when both file systems support those views.
        List<FileAttribute<?>> fileAttributes = new ArrayList<>();
        if (optionList.contains(StandardCopyOption.COPY_ATTRIBUTES)) {

            Set<String> sourceAttrViews = sourceFS.supportedFileAttributeViews();
            Set<String> targetAttrViews = targetFS.supportedFileAttributeViews();
            if (sourceAttrViews.contains(PosixFileAttributeViewImpl.NAME)
                    && targetAttrViews.contains(PosixFileAttributeViewImpl.NAME)) {
                PosixFileAttributes posixAttributes = sourceFS.provider().readAttributes(source,
                        PosixFileAttributes.class);
                fileAttributes.add(PosixFilePermissions.asFileAttribute(posixAttributes.permissions()));
            }

            if (sourceAttrViews.contains(HadoopFileAttributeViewImpl.NAME)
                    && targetAttrViews.contains(HadoopFileAttributeViewImpl.NAME)) {
                final HadoopFileAttributes hdfsAttributes = sourceFS.provider().readAttributes(source,
                        HadoopFileAttributes.class);
                fileAttributes.add(new FileAttribute<Long>() {
                    @Override
                    public String name() {
                        return HadoopFileAttributeViewImpl.NAME + ":blockSize";
                    }

                    @Override
                    public Long value() {
                        return hdfsAttributes.getBlockSize();
                    }
                });
                fileAttributes.add(new FileAttribute<Short>() {
                    @Override
                    public String name() {
                        return HadoopFileAttributeViewImpl.NAME + ":replication";
                    }

                    @Override
                    public Short value() {
                        return hdfsAttributes.getReplication();
                    }
                });
            }
        }

        FileAttribute<?>[] attributes = fileAttributes.toArray(new FileAttribute<?>[fileAttributes.size()]);

        try (SeekableByteChannel targetChannel = targetFS.provider().newByteChannel(target, openOptions,
                attributes)) {
            int buffSize = getConfiguration().getInt(DFSConfigKeys.DFS_STREAM_BUFFER_SIZE_KEY,
                    DFSConfigKeys.DFS_STREAM_BUFFER_SIZE_DEFAULT);
            ByteBuffer buffer = ByteBuffer.allocate(buffSize);
            // FIX: loop until EOF (-1) instead of read() > 0 -- a legal 0-byte
            // read would previously truncate the copy -- and drain the buffer
            // fully, since a single write() may write only part of it.
            while (sourceChannel.read(buffer) != -1) {
                buffer.flip();
                while (buffer.hasRemaining()) {
                    targetChannel.write(buffer);
                }
                buffer.clear();
            }
        }

        if (optionList.contains(StandardCopyOption.COPY_ATTRIBUTES)) {
            BasicFileAttributes attrs = sourceFS.provider().readAttributes(source, BasicFileAttributes.class);
            BasicFileAttributeView view = targetFS.provider().getFileAttributeView(target,
                    BasicFileAttributeView.class);
            // Guard against providers that do not expose the basic view.
            if (view != null) {
                view.setTimes(attrs.lastModifiedTime(), attrs.lastAccessTime(), attrs.creationTime());
            }
        }
    }
}

From source file:codes.thischwa.c5c.UserObjectProxy.java

/**
 * Attempts to strip EXIF data from the given temporary file.
 *
 * <p>Writes the stripped copy to a sibling file with a {@code _woExif} suffix
 * and returns its path when data was removed; otherwise (no remover configured,
 * nothing removed, or any I/O error) returns the original path unchanged.
 * NOTE(review): when nothing is removed, the {@code _woExif} file is left on
 * disk — confirm whether callers clean it up.
 *
 * @param tempPath path of the uploaded temporary file.
 * @return path of the EXIF-free copy, or {@code tempPath} on failure/no-op.
 */
public static java.nio.file.Path removeExif(java.nio.file.Path tempPath) {
    if (exifRemover == null)
        return tempPath;
    try {
        String fileName = tempPath.toString();
        String ext = FilenameUtils.getExtension(fileName);

        java.nio.file.Path woExifPath = Paths.get(fileName + "_woExif");
        boolean removed;
        // FIX: close both streams deterministically; the previous code leaked
        // them if exifRemover.removeExif did not close its arguments.
        try (java.io.InputStream in = Files.newInputStream(tempPath);
                java.io.OutputStream out = Files.newOutputStream(woExifPath, StandardOpenOption.CREATE_NEW)) {
            removed = exifRemover.removeExif(in, out, ext);
        }
        logger.debug("potential exif data removed: {}", removed);
        return (removed) ? woExifPath : tempPath;
    } catch (IOException e) {
        logger.warn("Error while removing EXIF data.", e);
        return tempPath;
    }
}

From source file:herddb.cli.HerdDBCLI.java

/**
 * Dumps every non-system table of the given tablespace into a single backup file.
 *
 * <p>The output name is derived from {@code file}: when {@code suffix} is
 * non-null it is inserted between the base name and the extension, and the
 * stream is wrapped according to the extension by {@code wrapOutputStream}.
 *
 * @param statement     statement used to list the tables of the tablespace
 * @param schema        tablespace name, also used to query the system catalog
 * @param file          target file name, possibly carrying an extension
 * @param suffix        optional suffix inserted before the extension; may be null
 * @param connection    JDBC connection, unwrapped to the native HerdDB connection
 * @param dumpfetchsize fetch size used while streaming table data
 */
private static void backupTableSpace(final Statement statement, String schema, String file, String suffix,
        final Connection connection, int dumpfetchsize) throws Exception, SQLException {
    // Collect the lower-cased names of every user table in the tablespace.
    // NOTE(review): schema is concatenated into the query; acceptable for a
    // CLI working on trusted input, but not a parameterized statement.
    List<String> tablesToDump = new ArrayList<>();
    try (ResultSet rs = statement
            .executeQuery("SELECT table_name FROM " + schema + ".systables WHERE systemtable='false'")) {
        while (rs.next()) {
            tablesToDump.add(rs.getString(1).toLowerCase());
        }
    }

    // Split "name.ext" so the optional suffix lands before the extension.
    String ext = "";
    int dot = file.lastIndexOf('.');
    if (dot >= 0) {
        ext = file.substring(dot);
        file = file.substring(0, dot);
    }
    String finalFile = (suffix == null ? file : file + suffix) + ext;
    Path outputfile = Paths.get(finalFile).toAbsolutePath();
    println("Backup tables " + tablesToDump + " from tablespace " + schema + " to " + outputfile);

    try (OutputStream fout = wrapOutputStream(Files.newOutputStream(outputfile, StandardOpenOption.CREATE_NEW),
            ext);
            SimpleBufferedOutputStream oo = new SimpleBufferedOutputStream(fout, 16 * 1024 * 1024)) {
        HerdDBConnection hcon = connection.unwrap(HerdDBConnection.class);
        HDBConnection hdbconnection = hcon.getConnection();
        BackupUtils.dumpTableSpace(schema, dumpfetchsize, hdbconnection, oo, new ProgressListener() {
            /** Forwards backup progress messages to the console. */
            @Override
            public void log(String actionType, String message, Map<String, Object> context) {
                println(message);
            }
        });
    }
    println("Backup finished for tablespace " + schema);
}

From source file:com.streamsets.pipeline.stage.origin.logtail.TestFileTailSource.java

/**
 * Verifies that the pending-files metric counts files matching the pattern
 * that have not yet been picked up for processing.
 */
@Test
public void testPendingFilesMetric() throws Exception {
    final File testDataDir = new File("target", UUID.randomUUID().toString());
    Assert.assertTrue(testDataDir.mkdirs());

    final List<File> files = Arrays.asList(new File(testDataDir, "file.txt-1"),
            new File(testDataDir, "file.txt-2"), new File(testDataDir, "file.txt-3"),
            new File(testDataDir, "file.txt-4"), new File(testDataDir, "file.txt-5"),
            new File(testDataDir, "file.txt-6"), new File(testDataDir, "file.txt-7"),
            new File(testDataDir, "file.txt-8"));

    // Only the first half exists up front; the second half is created lazily,
    // right before the source computes the pending-files metric.
    for (File file : files.subList(0, 4)) {
        Files.write(file.toPath(), Arrays.asList("A", "B", "C"), StandardCharsets.UTF_8,
                StandardOpenOption.CREATE_NEW);
    }

    FileTailSource source = PowerMockito.spy((FileTailSource) createSourceForPeriodicFile(
            testDataDir.getAbsolutePath() + "/file.txt-${PATTERN}", "[0-9]"));

    // Intercept the private calculatePendingFilesMetric method: create the
    // remaining four files first, then delegate to the real implementation so
    // those files are visible but not yet started for processing.
    PowerMockito.replace(MemberMatcher.method(FileTailSource.class, "calculatePendingFilesMetric"))
            .with(new InvocationHandler() {
                @Override
                public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
                    for (File file : files.subList(4, 8)) {
                        Files.write(file.toPath(), Arrays.asList("A", "B", "C"), StandardCharsets.UTF_8,
                                StandardOpenOption.CREATE_NEW);
                    }
                    // Call through to the real metric computation.
                    return method.invoke(proxy, args);
                }
            });

    SourceRunner runner = createRunner(source);
    try {
        runner.runInit();

        StageRunner.Output output = runner.runProduce(null, 36);

        // Four processed files x three records each = 12 records.
        Assert.assertEquals(12, output.getRecords().get("lane").size());

        Map<String, Counter> pendingFilesMetric = (Map<String, Counter>) Whitebox.getInternalState(source,
                "pendingFilesMetric");
        // The four lazily created files are still pending.
        Assert.assertEquals(4L, pendingFilesMetric
                .get(testDataDir.getAbsolutePath() + "/file.txt-${PATTERN}||[0-9]").getCount());
    } finally {
        runner.runDestroy();
    }
}

From source file:com.streamsets.pipeline.stage.origin.logtail.TestFileTailSource.java

@Test
public void testGlobbingForPeriodicRollPattern() throws Exception {
    final File testDataDir = new File("target", UUID.randomUUID().toString());

    Assert.assertTrue(testDataDir.mkdirs());

    final List<Path> dirPaths = Arrays.asList(
            Paths.get(testDataDir.getAbsolutePath() + File.separatorChar + "a1" + File.separatorChar + "b1"),
            Paths.get(testDataDir.getAbsolutePath() + File.separatorChar + "a2" + File.separatorChar + "b2"),
            Paths.get(testDataDir.getAbsolutePath() + File.separatorChar + "a3" + File.separatorChar + "b3"),
            Paths.get(testDataDir.getAbsolutePath() + File.separatorChar + "a4" + File.separatorChar + "b4"),
            Paths.get(testDataDir.getAbsolutePath() + File.separatorChar + "a5" + File.separatorChar + "b5"),
            Paths.get(testDataDir.getAbsolutePath() + File.separatorChar + "a6" + File.separatorChar + "b6"));

    for (int i = 0; i < dirPaths.size(); i++) {
        Path dirPath = dirPaths.get(i);
        Files.createDirectories(dirPath);
        Path filePath1 = Paths
                .get(dirPath.toString() + File.separatorChar + "file-" + String.valueOf(i) + ".txt");
        Files.write(filePath1, Arrays.asList("A", "B", "C"), StandardCharsets.UTF_8,
                StandardOpenOption.CREATE_NEW);
    }//from   ww  w  . j  ava2 s .c  om

    FileInfo fileInfo = new FileInfo();
    //We are looking for testdataDir/*/*/file-[0-9]+.txt
    fileInfo.fileFullPath = testDataDir.getAbsolutePath() + File.separatorChar + "*" + File.separatorChar + "*"
            + File.separatorChar + "file-${PATTERN}.txt";
    fileInfo.fileRollMode = FileRollMode.PATTERN;
    fileInfo.firstFile = "";
    fileInfo.patternForToken = "[0-9]+";

    FileTailConfigBean conf = new FileTailConfigBean();
    conf.dataFormat = DataFormat.TEXT;
    conf.multiLineMainPattern = "";
    conf.batchSize = 25;
    conf.maxWaitTimeSecs = 1;
    conf.fileInfos = Arrays.asList(fileInfo);
    conf.postProcessing = PostProcessingOptions.NONE;

    conf.dataFormatConfig.textMaxLineLen = 1024;

    FileTailSource source = new FileTailSource(conf, SCAN_INTERVAL);

    SourceRunner runner = createRunner(source);
    try {
        // run till current end and stop pipeline
        runner.runInit();
        StageRunner.Output output = runner.runProduce(null, 30);
        // (6 folders * 1 file * 3 records) = 18 records
        Assert.assertEquals(18L, output.getRecords().get("lane").size());

    } finally {
        runner.runDestroy();
    }
}

From source file:com.streamsets.pipeline.stage.origin.logtail.TestFileTailSource.java

/**
 * Verifies pending-files and offset-lag metrics when the base directory is
 * created only AFTER the pipeline has been initialised (late directory) and
 * the file path contains wildcard directory components.
 *
 * NOTE(review): statement order matters here — runInit() deliberately runs
 * before the data directory exists, and the sleep-time settings are toggled
 * between produce calls; do not reorder.
 */
@Test
public void testMetricsWithGlobbingAndLateDirectory() throws Exception {
    final File testDataDir = new File("target", UUID.randomUUID().toString());

    // Ten leaf directories of the shape <base>/aN/const/bN.
    final List<Path> dirPaths = Arrays.asList(
            Paths.get(testDataDir.getAbsolutePath() + File.separatorChar + "a1" + File.separatorChar + "const"
                    + File.separatorChar + "b1"),
            Paths.get(testDataDir.getAbsolutePath() + File.separatorChar + "a2" + File.separatorChar + "const"
                    + File.separatorChar + "b2"),
            Paths.get(testDataDir.getAbsolutePath() + File.separatorChar + "a3" + File.separatorChar + "const"
                    + File.separatorChar + "b3"),
            Paths.get(testDataDir.getAbsolutePath() + File.separatorChar + "a4" + File.separatorChar + "const"
                    + File.separatorChar + "b4"),
            Paths.get(testDataDir.getAbsolutePath() + File.separatorChar + "a5" + File.separatorChar + "const"
                    + File.separatorChar + "b5"),
            Paths.get(testDataDir.getAbsolutePath() + File.separatorChar + "a6" + File.separatorChar + "const"
                    + File.separatorChar + "b6"),
            Paths.get(testDataDir.getAbsolutePath() + File.separatorChar + "a7" + File.separatorChar + "const"
                    + File.separatorChar + "b7"),
            Paths.get(testDataDir.getAbsolutePath() + File.separatorChar + "a8" + File.separatorChar + "const"
                    + File.separatorChar + "b8"),
            Paths.get(testDataDir.getAbsolutePath() + File.separatorChar + "a9" + File.separatorChar + "const"
                    + File.separatorChar + "b9"),
            Paths.get(testDataDir.getAbsolutePath() + File.separatorChar + "a10" + File.separatorChar + "const"
                    + File.separatorChar + "b10"));

    FileInfo fileInfo = new FileInfo();
    //We are looking for testdataDir/*/const/*/file-[0-9]+.txt
    fileInfo.fileFullPath = testDataDir.getAbsolutePath() + File.separatorChar + "*" + File.separatorChar
            + "const" + File.separatorChar + "*" + File.separatorChar + "file-${PATTERN}.txt";
    fileInfo.fileRollMode = FileRollMode.PATTERN;
    fileInfo.firstFile = "";
    fileInfo.patternForToken = "[0-9]+";

    FileTailConfigBean conf = new FileTailConfigBean();
    conf.dataFormat = DataFormat.TEXT;
    conf.multiLineMainPattern = "";
    conf.batchSize = 40;
    conf.maxWaitTimeSecs = 40;
    conf.fileInfos = Arrays.asList(fileInfo);
    conf.postProcessing = PostProcessingOptions.NONE;
    // Required so the source tolerates the base directory appearing late.
    conf.allowLateDirectories = true;

    conf.dataFormatConfig.textMaxLineLen = 1024;

    FileTailSource source = PowerMockito.spy(new FileTailSource(conf, SCAN_INTERVAL));

    SourceRunner runner = createRunner(source);

    // run till current end and stop pipeline
    runner.runInit();

    //Late directory(testDataDir) creation
    Assert.assertTrue(testDataDir.mkdirs());

    for (int i = 0; i < dirPaths.size(); i++) {
        Path dirPath = dirPaths.get(i);
        Files.createDirectories(dirPath);
        Path filePath = Paths
                .get(dirPath.toString() + File.separatorChar + "file-" + String.valueOf(i) + ".txt");
        Files.write(filePath, Arrays.asList("A", "B", "C", "D", "E", "F", "G", "H", "I", "J"), //10 records * 2 bytes = 20 bytes.
                StandardCharsets.UTF_8, StandardOpenOption.CREATE_NEW);
    }

    // Allow the source time to discover the late directory and its files.
    setSleepTimeForFindingPaths(10000L, 2000L);

    try {
        //We have totally 10(folders) * 1(file) * 10 (records) = 100 records
        //Also means totally 100 records * 2 bytes = 200 bytes.
        //We will read only 20 records.
        //This means the total is (remaining 80 records * 2 bytes) = 160 bytes yet to be read.
        //including pending files and offsetLag.
        StageRunner.Output output = runner.runProduce(null, 20);
        Assert.assertEquals(20L, output.getRecords().get("lane").size());
        checkPendingAndOffsetLag(source, 160L);

        //All files would have been found, don't need to wait for finding them.
        setSleepTimeForFindingPaths(0L, 0L);

        //We are gonna read another 40 records and check
        //If we read 40 more records this means, we will end up reading another 40*2 = 80 bytes
        //Total remaining bytes to read is 160 - 80 = 80 bytes
        output = runner.runProduce(output.getNewOffset(), 40);
        Assert.assertEquals(40L, output.getRecords().get("lane").size());
        checkPendingAndOffsetLag(source, 80L);

        //We are gonna read 40 records and check
        //If we read 40 more records this means, we will end up reading another 40*2 = 80 bytes
        //Total remaining bytes to read is 80 - 80 = 0 bytes.
        output = runner.runProduce(output.getNewOffset(), 40);
        Assert.assertEquals(40L, output.getRecords().get("lane").size());
        checkPendingAndOffsetLag(source, 0L);
    } finally {
        runner.runDestroy();
    }
}

From source file:org.apache.archiva.checksum.ChecksummedFile.java

/**
 * Creates a checksum file of the provided referenceFile.
 *
 * <p>Any pre-existing checksum file for the same algorithm is deleted first,
 * then the file is recreated (CREATE_NEW) with the standard
 * "&lt;hash&gt;  &lt;filename&gt;" layout.
 *
 * @param checksumAlgorithm the hash to use.
 * @return the checksum File that was created.
 * @throws IOException if there was a problem either reading the referenceFile, or writing the checksum file.
 */
public File createChecksum(ChecksumAlgorithm checksumAlgorithm) throws IOException {
    File checksumFile = new File(referenceFile.getAbsolutePath() + "." + checksumAlgorithm.getExt());
    Files.deleteIfExists(checksumFile.toPath());
    String checksum = calculateChecksum(checksumAlgorithm);
    // FIX: use an explicit charset; the bare getBytes() used the platform
    // default encoding, making the checksum file bytes platform-dependent
    // whenever the reference file name contains non-ASCII characters.
    Files.write(checksumFile.toPath(), //
            (checksum + "  " + referenceFile.getName()).getBytes(java.nio.charset.StandardCharsets.UTF_8), //
            StandardOpenOption.CREATE_NEW);
    return checksumFile;
}