Example usage for java.nio.file Files size

Introduction

On this page you can find usage examples for the java.nio.file.Files.size method.

Prototype

public static long size(Path path) throws IOException 

Document

Returns the size of a file (in bytes).
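
Before the usage examples, here is a minimal, self-contained sketch of a typical call; the class name and path below are placeholders for illustration and do not come from the examples that follow.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class FileSizeExample {
    public static void main(String[] args) throws IOException {
        // Placeholder path, used only for illustration.
        Path path = Paths.get("data", "report.txt");

        // Files.size returns the file size in bytes; it throws an IOException
        // (for example NoSuchFileException) if the file cannot be read.
        long bytes = Files.size(path);
        System.out.println(path + " is " + bytes + " bytes");
    }
}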

Usage

From source file:us.fatehi.schemacrawler.webapp.service.storage.FileSystemStorageService.java

/**
 * {@inheritDoc}
 */
@Override
public void store(final InputStreamSource streamSource, final String filenameKey,
        final FileExtensionType extension) throws Exception {
    validateFilenameKey(filenameKey);
    if (streamSource == null || extension == null) {
        throw new Exception(String.format("Failed to store file %s%s", filenameKey, extension));
    }

    // Save stream to a file
    final Path filePath = storageRoot.resolve(filenameKey + "." + extension.getExtension());
    copy(streamSource.getInputStream(), filePath);

    // Check that the file is not empty
    if (Files.size(filePath) == 0) {
        Files.delete(filePath);
        throw new Exception(String.format("No data for file %s.%s", filenameKey, extension));
    }
}

From source file:com.liferay.sync.engine.document.library.event.DownloadFileEvent.java

@Override
protected void processRequest() throws Exception {
    SyncFile syncFile = (SyncFile) getParameterValue("syncFile");

    Path filePath = Paths.get(syncFile.getFilePathName());

    syncFile.setPreviousModifiedTime(FileUtil.getLastModifiedTime(filePath));

    syncFile.setState(SyncFile.STATE_IN_PROGRESS);
    syncFile.setUiEvent(SyncFile.UI_EVENT_DOWNLOADING);

    SyncFileService.update(syncFile);

    if ((boolean) getParameterValue("batch")) {
        BatchDownloadEvent batchDownloadEvent = BatchEventManager.getBatchDownloadEvent(syncFile);

        if (batchDownloadEvent.addEvent(this)) {
            return;
        }
    }

    StringBuilder sb = new StringBuilder();

    SyncAccount syncAccount = SyncAccountService.fetchSyncAccount(getSyncAccountId());

    String url = ServerUtil.getDownloadURL(syncAccount.getSyncAccountId(), syncAccount.getUrl());

    sb.append(url);

    sb.append(getURLPath());
    sb.append("/");
    sb.append(syncFile.getRepositoryId());
    sb.append("/");
    sb.append(syncFile.getTypeUuid());

    if ((boolean) getParameterValue("patch")) {
        sb.append("?patch=true&sourceVersionId=");
        sb.append(getParameterValue("sourceVersionId"));
        sb.append("&targetVersionId=");
        sb.append(getParameterValue("targetVersionId"));
    } else {
        sb.append("?version=");
        sb.append(syncFile.getVersion());
        sb.append("&versionId=");
        sb.append(syncFile.getVersionId());
    }

    HttpGet httpGet = new HttpGet(sb.toString());

    Path tempFilePath = FileUtil.getTempFilePath(syncFile);

    if (ServerInfo.supportsPartialDownloads(getSyncAccountId()) && FileUtil.exists(tempFilePath)) {
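        // A partial temp file already exists on a server that supports partial downloads;
        // if it is smaller than the full file, resume by requesting only the remaining bytes.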

        long size = Files.size(tempFilePath);

        if (syncFile.getSize() > size) {
            httpGet.setHeader("Range", "bytes=" + size + "-");
        }
    }

    executeAsynchronousGet(httpGet);
}

From source file:org.eclipse.packagedrone.utils.rpm.build.PayloadRecorder.java

public Result addFile(final String targetPath, final Path path, final Consumer<CpioArchiveEntry> customizer)
        throws IOException {
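    // Read the payload size up front so it can be recorded in the CPIO entry header.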
    final long size = Files.size(path);

    final CpioArchiveEntry entry = new CpioArchiveEntry(targetPath);
    entry.setSize(size);

    if (customizer != null) {
        customizer.accept(entry);
    }

    this.archiveStream.putArchiveEntry(entry);

    MessageDigest digest;
    try {
        digest = createDigest();
    } catch (final NoSuchAlgorithmException e) {
        throw new IOException(e);
    }

    try (InputStream in = new BufferedInputStream(Files.newInputStream(path))) {
        ByteStreams.copy(new DigestInputStream(in, digest), this.archiveStream);
    }

    this.archiveStream.closeArchiveEntry();

    return new Result(size, digest.digest());
}

From source file:io.undertow.server.handlers.file.FileHandlerTestCase.java

@Test
public void testHeadRequest() throws IOException, URISyntaxException {
    TestHttpClient client = new TestHttpClient();
    Path file = Paths.get(getClass().getResource("page.html").toURI());
    Path rootPath = file.getParent();
    try {
        DefaultServer.setRootHandler(new CanonicalPathHandler().setNext(new PathHandler().addPrefixPath("/path",
                new ResourceHandler(new PathResourceManager(rootPath, 10485760))
                        .setDirectoryListingEnabled(true))));

        HttpHead get = new HttpHead(DefaultServer.getDefaultServerURL() + "/path/page.html");
        HttpResponse result = client.execute(get);
        Assert.assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
        Assert.assertEquals(Long.toString(Files.size(file)),
                result.getHeaders(Headers.CONTENT_LENGTH_STRING)[0].getValue());
        Header[] headers = result.getHeaders("Content-Type");
        Assert.assertEquals("text/html", headers[0].getValue());

    } finally {
        client.getConnectionManager().shutdown();
    }
}

From source file:org.apache.archiva.indexer.maven.MavenIndexManagerTest.java

@Test
public void pack() throws Exception {
    createTestContext();
    Path destDir = repository.getLocalPath().resolve("org/apache/archiva/archiva-webapp/1.0");
    Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-webapp/1.0");
    org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(), destDir.toFile());
    mavenIndexManager.scan(ctx);
    mavenIndexManager.pack(ctx);
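    // The packed index should contain at least one non-empty .gz file.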
    assertTrue(Files.list(indexPath).filter(path -> {
        try {
            return path.getFileName().toString().endsWith(".gz") && Files.size(path) > 0;
        } catch (IOException e) {
            return false;
        }
    }).findAny().isPresent());
}

From source file:fr.duminy.jbackup.core.archive.Compressor.java

public void compress(ArchiveParameters archiveParameters, List<SourceWithPath> files,
        final TaskListener listener, Cancellable cancellable) throws ArchiveException {
    final String name = archiveParameters.getArchive().toString();
    final MutableLong processedSize = new MutableLong();

    try (OutputStream fos = Files.newOutputStream(archiveParameters.getArchive());
            ArchiveOutputStream output = factory.create(fos)) {
        LOG.info("Backup '{}': creating archive {}", name, archiveParameters.getArchive());
        for (final SourceWithPath file : files) {
            if ((cancellable != null) && cancellable.isCancelled()) {
                break;
            }

            LOG.info("Backup '{}': compressing file {}", name, file.getPath().toAbsolutePath());
            try (InputStream input = createCountingInputStream(listener, processedSize,
                    Files.newInputStream(file.getPath()))) {
                final String path;
                if (archiveParameters.isRelativeEntries()) {
                    Path source = file.getSource();
                    if (Files.isDirectory(source)) {
                        if (source.getParent() == null) {
                            path = source.relativize(file.getPath()).toString();
                        } else {
                            path = source.getParent().relativize(file.getPath()).toString();
                        }
                    } else {
                        path = file.getPath().getFileName().toString();
                    }
                } else {
                    path = file.getPath().toString();
                }
                LOG.info("Backup '{}': adding entry {}", new Object[] { name, path });
                output.addEntry(path, input);
            }
        }
        LOG.info("Backup '{}': archive {} created ({})", new Object[] { name, archiveParameters.getArchive(),
                FileUtils.byteCountToDisplaySize(Files.size(archiveParameters.getArchive())) });
    } catch (IOException e) {
        throw new ArchiveException(e);
    } catch (Exception e) {
        throw new ArchiveException(e);
    }
}

From source file:org.wte4j.examples.showcase.server.hsql.ShowCaseDbInitializerTest.java

@Test
public void createDatabaseFilesWithOveride() throws IOException, SQLException {
    ApplicationContext context = new StaticApplicationContext();
    Path directory = Files.createTempDirectory("database");
    Path dummyFile = directory.resolve("wte4j-showcase.script");
    Files.createFile(dummyFile);

    try {

        ShowCaseDbInitializer showCaseDbInitializer = new ShowCaseDbInitializer(context);
        showCaseDbInitializer.createDateBaseFiles(directory, true);

        Set<String> fileNamesInDirectory = listFiles(directory);
        Set<String> expectedFileNames = new HashSet<String>();
        expectedFileNames.add("wte4j-showcase.lobs");
        expectedFileNames.add("wte4j-showcase.properties");
        expectedFileNames.add("wte4j-showcase.script");
        assertEquals(expectedFileNames, fileNamesInDirectory);
        assertTrue(Files.size(dummyFile) > 0);

    } finally {
        deleteDirectory(directory);
    }
}

From source file:com.qwazr.library.archiver.ArchiverTool.java

public void decompress(final Path source, final Path destFile) throws IOException, CompressorException {
    if (Files.exists(destFile) && Files.size(destFile) > 0)
        throw new IOException("The file already exists: " + destFile.toAbsolutePath());
    try (final InputStream input = getCompressorNewInputStream(
            new BufferedInputStream(Files.newInputStream(source)))) {
        IOUtils.copy(input, destFile);
    } catch (IOException e) {
        throw new IOException("Unable to decompress the file: " + source.toAbsolutePath(), e);
    }
}

From source file:com.github.horrorho.inflatabledonkey.pcs.xfile.FileAssembler.java

static void truncate(Path file, long to) throws UncheckedIOException {
    // TODO should really limit our written data stream.
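    // Truncation only shrinks the file: if the on-disk size is already smaller than the
    // target, a warning is logged instead.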
    try {
        if (to == 0) {
            return;
        }

        long size = Files.size(file);
        if (size > to) {
            Files.newByteChannel(file, WRITE).truncate(to).close();
            logger.debug("-- truncate() - truncated: {}, {} > {}", file, size, to);

        } else if (size < to) {
            logger.warn("-- truncate() - cannot truncate: {}, {} < {}", file, size, to);
        }
    } catch (IOException ex) {
        throw new UncheckedIOException(ex);
    }
}

From source file:com.spectralogic.ds3client.integration.DataIntegrity_Test.java

@Test
public void singleFilePut() throws IOException, URISyntaxException, XmlProcessingException, SignatureException {
    final String bucketName = "single_file_put_test";
    final String book = "beowulf.txt";

    try {
        HELPERS.ensureBucketExists(bucketName, envDataPolicyId);

        final Path objPath = ResourceUtils.loadFileResource(Util.RESOURCE_BASE_NAME + book);
        final String digest = DigestUtils.sha256Hex(Files.newInputStream(objPath));
        final Ds3Object obj = new Ds3Object(book, Files.size(objPath));

        final Ds3ClientHelpers.Job putJob = HELPERS.startWriteJob(bucketName, Lists.newArrayList(obj));
        putJob.transfer(new ResourceObjectPutter(Util.RESOURCE_BASE_NAME));

        final Path tempDir = Files.createTempDirectory("ds3_test_");

        final Ds3ClientHelpers.Job getJob = HELPERS.startReadAllJob(bucketName);
        getJob.transfer(new FileObjectGetter(tempDir));

        final String secondDigest = DigestUtils.sha256Hex(Files.newInputStream(tempDir.resolve(book)));
        assertThat(secondDigest, is(equalTo(digest)));
    } finally {
        Util.deleteAllContents(client, bucketName);
    }
}