Example usage for java.nio.file.Files.size

List of usage examples for java.nio.file.Files.size

Introduction

On this page you can find usage examples for java.nio.file.Files.size.

Prototype

public static long size(Path path) throws IOException 

Document

Returns the size of a file (in bytes).
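
Before the project excerpts below, here is a minimal, self-contained sketch of the method in isolation. The file name example.txt is a hypothetical placeholder; Files.size reports the size in bytes and throws an IOException (e.g. NoSuchFileException) when the file does not exist, so the call is guarded here.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class FilesSizeExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical file used for illustration only.
        Path path = Paths.get("example.txt");

        // Guard with Files.exists: Files.size throws
        // NoSuchFileException for a missing file.
        if (Files.exists(path)) {
            long bytes = Files.size(path);
            System.out.println(path + " is " + bytes + " bytes");
        } else {
            System.out.println(path + " does not exist");
        }
    }
}

Note that the result is only well defined for regular files; for directories and other special files the returned size is implementation specific.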

Usage

From source file:org.opencb.cellbase.app.transform.GeneParser.java

@Deprecated
private void connect(Path genomeSequenceFilePath) throws ClassNotFoundException, SQLException, IOException {
    logger.info("Connecting to reference genome sequence database ...");
    Class.forName("org.sqlite.JDBC");
    sqlConn = DriverManager.getConnection(
            "jdbc:sqlite:" + genomeSequenceFilePath.getParent().toString() + "/reference_genome.db");
    if (!Files.exists(Paths.get(genomeSequenceFilePath.getParent().toString(), "reference_genome.db"))
            || Files.size(genomeSequenceFilePath.getParent().resolve("reference_genome.db")) == 0) {
        logger.info("Genome sequence database doesn't exists and will be created");
        Statement createTable = sqlConn.createStatement();
        createTable.executeUpdate("CREATE TABLE if not exists  "
                + "genome_sequence (sequenceName VARCHAR(50), chunkId VARCHAR(30), start INT, end INT, sequence VARCHAR(2000))");
        indexReferenceGenomeFasta(genomeSequenceFilePath);
    }
    indexedSequences = getIndexedSequences();
    sqlQuery = sqlConn.prepareStatement("SELECT sequence from genome_sequence WHERE chunkId = ? "); //AND start <= ? AND end >= ?
    logger.info("Genome sequence database connected");
}

From source file:org.apache.nifi.controller.repository.FileSystemRepository.java

@Override
public long size(final ContentClaim claim) throws IOException {
    if (claim == null) {
        return 0L;
    }

    // see javadocs for claim.getLength() as to why we do this.
    if (claim.getLength() < 0) {
        return Files.size(getPath(claim, true)) - claim.getOffset();
    }

    return claim.getLength();
}

From source file:com.spectralogic.ds3client.integration.Smoke_Test.java

@Test
public void partialObjectGetOverChunkBoundry() throws IOException, XmlProcessingException {
    final String bucketName = "partialGetOverBoundry";
    final String testFile = "testObject.txt";
    final Path filePath = Files.createTempFile("ds3", testFile);
    final int seed = 12345;
    LOG.info("Test file: " + filePath.toAbsolutePath());
    try {
        HELPERS.ensureBucketExists(bucketName, envDataPolicyId);

        final int objectSize = PutBulkJobSpectraS3Request.MIN_UPLOAD_SIZE_IN_BYTES * 2;

        final List<Ds3Object> objs = Lists.newArrayList(new Ds3Object(testFile, objectSize));

        final Ds3ClientHelpers.Job putJob = HELPERS.startWriteJob(bucketName, objs, WriteJobOptions.create()
                .withMaxUploadSize(PutBulkJobSpectraS3Request.MIN_UPLOAD_SIZE_IN_BYTES));

        putJob.transfer(new Ds3ClientHelpers.ObjectChannelBuilder() {
            @Override
            public SeekableByteChannel buildChannel(final String key) throws IOException {
                final byte[] randomData = IOUtils.toByteArray(new RandomDataInputStream(seed, objectSize));
                final ByteBuffer randomBuffer = ByteBuffer.wrap(randomData);

                final ByteArraySeekableByteChannel channel = new ByteArraySeekableByteChannel(objectSize);
                channel.write(randomBuffer);

                return channel;

            }
        });

        final List<Ds3Object> partialObjectGet = Lists.newArrayList();
        partialObjectGet.add(new PartialDs3Object(testFile,
                Range.byPosition(PutBulkJobSpectraS3Request.MIN_UPLOAD_SIZE_IN_BYTES - 100,
                        PutBulkJobSpectraS3Request.MIN_UPLOAD_SIZE_IN_BYTES + 99)));

        final Ds3ClientHelpers.Job getJob = HELPERS.startReadJob(bucketName, partialObjectGet);

        getJob.transfer(new Ds3ClientHelpers.ObjectChannelBuilder() {
            @Override
            public SeekableByteChannel buildChannel(final String key) throws IOException {
                return Files.newByteChannel(filePath, StandardOpenOption.WRITE, StandardOpenOption.CREATE);
            }
        });

        assertThat(Files.size(filePath), is(200L));

    } finally {
        Files.delete(filePath);
        deleteAllContents(client, bucketName);
    }
}

From source file:ddf.catalog.impl.CatalogFrameworkImpl.java

private void generateMetacardAndContentItems(StorageRequest storageRequest,
        List<ContentItem> incomingContentItems, Map<String, Metacard> metacardMap,
        List<ContentItem> contentItems, Map<String, Path> tmpContentPaths) throws IngestException {
    for (ContentItem contentItem : incomingContentItems) {
        try {
            Path tmpPath = null;
            long size;
            try {
                if (contentItem.getInputStream() != null) {
                    tmpPath = Files.createTempFile(FilenameUtils.getBaseName(contentItem.getFilename()),
                            FilenameUtils.getExtension(contentItem.getFilename()));
                    Files.copy(contentItem.getInputStream(), tmpPath, StandardCopyOption.REPLACE_EXISTING);
                    size = Files.size(tmpPath);
                    tmpContentPaths.put(contentItem.getId(), tmpPath);
                } else {
                    throw new IngestException("Could not copy bytes of content message.  Message was NULL.");
                }
            } catch (IOException e) {
                if (tmpPath != null) {
                    FileUtils.deleteQuietly(tmpPath.toFile());
                }
                throw new IngestException("Could not copy bytes of content message.", e);
            } finally {
                IOUtils.closeQuietly(contentItem.getInputStream());
            }
            String mimeTypeRaw = contentItem.getMimeTypeRawData();
            mimeTypeRaw = guessMimeType(mimeTypeRaw, contentItem.getFilename(), tmpPath);

            String fileName = updateFileExtension(mimeTypeRaw, contentItem.getFilename());
            Metacard metacard = generateMetacard(mimeTypeRaw, contentItem.getId(), fileName, size,
                    (Subject) storageRequest.getProperties().get(SecurityConstants.SECURITY_SUBJECT), tmpPath);
            metacardMap.put(metacard.getId(), metacard);

            ContentItem generatedContentItem = new ContentItemImpl(metacard.getId(),
                    com.google.common.io.Files.asByteSource(tmpPath.toFile()), mimeTypeRaw, fileName, size,
                    metacard);
            contentItems.add(generatedContentItem);
        } catch (Exception e) {
            tmpContentPaths.values().stream().forEach(path -> FileUtils.deleteQuietly(path.toFile()));
            tmpContentPaths.clear();
            throw new IngestException("Could not create metacard.", e);
        }
    }
}

From source file:com.spectralogic.ds3client.integration.GetJobManagement_Test.java

private void doReadJobWithJobStarter(final ReadJobStarter readJobStarter) throws IOException,
        URISyntaxException, NoSuchMethodException, IllegalAccessException, InvocationTargetException {
    final String tempPathPrefix = null;
    final Path tempDirectory = Files.createTempDirectory(Paths.get("."), tempPathPrefix);

    try {
        final String DIR_NAME = "largeFiles/";
        final String FILE_NAME = "lesmis.txt";

        final Path objPath = ResourceUtils.loadFileResource(DIR_NAME + FILE_NAME);
        final long bookSize = Files.size(objPath);
        final Ds3Object obj = new Ds3Object(FILE_NAME, bookSize);

        final Ds3ClientShim ds3ClientShim = new Ds3ClientShim((Ds3ClientImpl) client);

        final int maxNumBlockAllocationRetries = 1;
        final int maxNumObjectTransferAttempts = 3;
        final Ds3ClientHelpers ds3ClientHelpers = Ds3ClientHelpers.wrap(ds3ClientShim,
                maxNumBlockAllocationRetries, maxNumObjectTransferAttempts);

        final Ds3ClientHelpers.Job readJob = readJobStarter.startReadJob(ds3ClientHelpers, BUCKET_NAME,
                Arrays.asList(obj));

        final AtomicBoolean dataTransferredEventReceived = new AtomicBoolean(false);
        final AtomicBoolean objectCompletedEventReceived = new AtomicBoolean(false);
        final AtomicBoolean checksumEventReceived = new AtomicBoolean(false);
        final AtomicBoolean metadataEventReceived = new AtomicBoolean(false);
        final AtomicBoolean waitingForChunksEventReceived = new AtomicBoolean(false);
        final AtomicBoolean failureEventReceived = new AtomicBoolean(false);

        readJob.attachDataTransferredListener(new DataTransferredListener() {
            @Override
            public void dataTransferred(final long size) {
                dataTransferredEventReceived.set(true);
                assertEquals(bookSize, size);
            }
        });
        readJob.attachObjectCompletedListener(new ObjectCompletedListener() {
            @Override
            public void objectCompleted(final String name) {
                objectCompletedEventReceived.set(true);
            }
        });
        readJob.attachChecksumListener(new ChecksumListener() {
            @Override
            public void value(final BulkObject obj, final ChecksumType.Type type, final String checksum) {
                checksumEventReceived.set(true);
                assertEquals("69+JXWeZuzl2HFTM6Lbo8A==", checksum);
            }
        });
        readJob.attachMetadataReceivedListener(new MetadataReceivedListener() {
            @Override
            public void metadataReceived(final String filename, final Metadata metadata) {
                metadataEventReceived.set(true);
            }
        });
        readJob.attachWaitingForChunksListener(new WaitingForChunksListener() {
            @Override
            public void waiting(final int secondsToWait) {
                waitingForChunksEventReceived.set(true);
            }
        });
        readJob.attachFailureEventListener(new FailureEventListener() {
            @Override
            public void onFailure(final FailureEvent failureEvent) {
                failureEventReceived.set(true);
            }
        });

        readJob.transfer(new FileObjectGetter(tempDirectory));

        final File originalFile = ResourceUtils.loadFileResource(DIR_NAME + FILE_NAME).toFile();
        final File fileCopiedFromBP = Paths.get(tempDirectory.toString(), FILE_NAME).toFile();
        assertTrue(FileUtils.contentEquals(originalFile, fileCopiedFromBP));

        assertTrue(dataTransferredEventReceived.get());
        assertTrue(objectCompletedEventReceived.get());
        assertTrue(checksumEventReceived.get());
        assertTrue(metadataEventReceived.get());
        assertFalse(waitingForChunksEventReceived.get());
        assertFalse(failureEventReceived.get());
    } finally {
        FileUtils.deleteDirectory(tempDirectory.toFile());
    }
}

From source file:com.spectralogic.ds3client.integration.Smoke_Test.java

@Test
public void partialGetWithBookOverChunkBoundry()
        throws IOException, XmlProcessingException, URISyntaxException {
    final String bucketName = "partialGetOnBook";
    final Path filePath = Files.createTempFile("ds3", "lesmis-copies.txt");
    LOG.info("TempFile for partial get of book: " + filePath.toAbsolutePath().toString());

    try {

        HELPERS.ensureBucketExists(bucketName, envDataPolicyId);

        final List<Ds3Object> putObjects = Lists.newArrayList(new Ds3Object("lesmis-copies.txt", 13290604));

        final Ds3ClientHelpers.Job putJob = HELPERS.startWriteJob(bucketName, putObjects, WriteJobOptions
                .create().withMaxUploadSize(PutBulkJobSpectraS3Request.MIN_UPLOAD_SIZE_IN_BYTES));

        putJob.transfer(new ResourceObjectPutter("largeFiles/"));

        final List<Ds3Object> getObjects = Lists.newArrayList();
        getObjects.add(new PartialDs3Object("lesmis-copies.txt", Range.byLength(1048476, 200)));

        final Ds3ClientHelpers.Job getJob = HELPERS.startReadJob(bucketName, getObjects);

        getJob.transfer(new Ds3ClientHelpers.ObjectChannelBuilder() {
            @Override
            public SeekableByteChannel buildChannel(final String key) throws IOException {
                return Files.newByteChannel(filePath, StandardOpenOption.WRITE, StandardOpenOption.CREATE);
            }
        });

        final Path expectedResultPath = Paths.get(Smoke_Test.class.getResource("/largeFiles/output").toURI());

        assertThat(Files.size(filePath), is(200L));
        final String partialFile = new String(Files.readAllBytes(filePath), Charset.forName("UTF-8"));
        final String expectedResult = new String(Files.readAllBytes(expectedResultPath),
                Charset.forName("UTF-8"));
        assertThat(partialFile, is(expectedResult.substring(0, expectedResult.length() - 1))); // drop the trailing newline that the OS appends to the expected file
    } finally {
        deleteAllContents(client, bucketName);
        Files.delete(filePath);
    }
}

From source file:org.dcm4chee.storage.test.unit.tar.TarContainerProviderTest.java

@Test
public void testWriteEntriesTo() throws Exception {
    Path srcEntryPath = createFile(ENTRY, ENTRY_FILE);
    Path targetTarPath = dir.getPath().resolve(NAME);
    try (OutputStream out = Files.newOutputStream(targetTarPath)) {
        provider.writeEntriesTo(storageCtx, makeEntries(srcEntryPath), out);
    }
    assertEquals(TAR.length, Files.size(targetTarPath));
    try (TarArchiveInputStream expectedTar = new TarArchiveInputStream(new ByteArrayInputStream(TAR));
            TarArchiveInputStream actualTar = new TarArchiveInputStream(Files.newInputStream(targetTarPath))) {
        assertTarEquals(expectedTar, actualTar);
    }
}

From source file:org.apache.nifi.controller.StandardFlowService.java

@Override
public void copyCurrentFlow(final OutputStream os) throws IOException {
    readLock.lock();
    try {
        if (!Files.exists(flowXml) || Files.size(flowXml) == 0) {
            return;
        }

        try (final InputStream in = Files.newInputStream(flowXml, StandardOpenOption.READ);
                final InputStream gzipIn = new GZIPInputStream(in)) {
            FileUtils.copy(gzipIn, os);
        }
    } finally {
        readLock.unlock();
    }
}

From source file:com.spectralogic.ds3client.integration.Smoke_Test.java

@Test
public void attachDataTransferredListenerTest() throws IOException, URISyntaxException, XmlProcessingException {
    final String bucketName = "test_attachDataTransferredListener";
    try {
        HELPERS.ensureBucketExists(bucketName, envDataPolicyId);

        final List<Ds3Object> objects = new ArrayList<>();
        long booksSize = 0;
        for (final String book : BOOKS) {
            final Path objPath = ResourceUtils.loadFileResource(RESOURCE_BASE_NAME + book);
            final long bookSize = Files.size(objPath);
            booksSize += bookSize;
            final Ds3Object obj = new Ds3Object(book, bookSize);

            objects.add(obj);
        }

        final Ds3ClientHelpers.Job job = HELPERS.startWriteJob(bucketName, objects);
        final TransferredListener transferredListener = new TransferredListener();

        job.attachObjectCompletedListener(transferredListener);
        job.attachDataTransferredListener(transferredListener);

        job.transfer(new ResourceObjectPutter(RESOURCE_BASE_NAME));

        assertThat(transferredListener.getTotalBytes(), is(booksSize));
        assertThat(transferredListener.getNumberOfFiles(), is(BOOKS.length));

    } finally {
        deleteAllContents(client, bucketName);
    }
}

From source file:org.opencb.cellbase.app.transform.VariationParser.java

private boolean isEmpty(String fileName) throws IOException {
    if (Files.exists(Paths.get(fileName))) {
        return Files.size(Paths.get(fileName)) == 0;
    } else {
        return Files.size(Paths.get(fileName + ".gz")) == 0;
    }
}