Example usage for java.nio.file Files newOutputStream

List of usage examples for java.nio.file Files newOutputStream

Introduction

On this page you can find example usages of java.nio.file.Files#newOutputStream.

Prototype

public static OutputStream newOutputStream(Path path, OpenOption... options) throws IOException 

Source Link

Document

Opens or creates a file, returning an output stream that may be used to write bytes to the file.

Usage

From source file:com.facebook.buck.cxx.ArchiveStepIntegrationTest.java

@Test
public void thinArchives() throws IOException, InterruptedException {
    // Thin archives are only exercised on platforms whose `ar` tool can support them.
    assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX);
    ProjectFilesystem filesystem = new ProjectFilesystem(tmp.getRoot());
    CxxPlatform platform = CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));
    assumeTrue(platform.getAr().supportsThinArchives());

    // Build up the paths to various files the archive step will use.
    SourcePathResolver sourcePathResolver = new SourcePathResolver(new SourcePathRuleFinder(
            new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())));
    Archiver archiver = platform.getAr();

    Path output = filesystem.getPath("foo/libthin.a");
    filesystem.mkdirs(output.getParent());

    // Create a really large input file (1 MiB of repeated "hello\n") so it's obvious
    // that the archive is thin: a regular archive would have to contain all of it.
    Path input = filesystem.getPath("bar/blah.dat");
    filesystem.mkdirs(input.getParent());
    byte[] largeInputFile = new byte[1024 * 1024];
    byte[] fillerToRepeat = "hello\n".getBytes(StandardCharsets.UTF_8);
    for (int i = 0; i < largeInputFile.length; i++) {
        largeInputFile[i] = fillerToRepeat[i % fillerToRepeat.length];
    }
    filesystem.writeBytesToPath(largeInputFile, input);

    // Build an archive step.
    // NOTE(review): presumably the `true` passed to getArchiveOptions requests
    // thin-archive mode — confirm against its declaration.
    ArchiveStep archiveStep = new ArchiveStep(filesystem, archiver.getEnvironment(),
            archiver.getCommandPrefix(sourcePathResolver), ImmutableList.of(), getArchiveOptions(true), output,
            ImmutableList.of(input), archiver);

    // Execute the archive step and verify it ran successfully.
    ExecutionContext executionContext = TestExecutionContext.newInstance();
    TestConsole console = (TestConsole) executionContext.getConsole();
    int exitCode = archiveStep.execute(executionContext).getExitCode();
    assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);

    // Verify that the thin header is present.
    assertThat(filesystem.readFirstLine(output), Matchers.equalTo(Optional.of("!<thin>")));

    // Verify that even though the archived contents is really big, the archive is still small.
    assertThat(filesystem.getFileSize(output), Matchers.lessThan(1000L));

    // NOTE: Replace the thin header with a normal header just so the commons compress parser
    // can parse the archive contents. Opening with only WRITE (no truncation) overwrites
    // the leading bytes of the file in place and keeps the rest intact.
    try (OutputStream outputStream = Files.newOutputStream(filesystem.resolve(output),
            StandardOpenOption.WRITE)) {
        outputStream.write(ObjectFileScrubbers.GLOBAL_HEADER);
    }

    // Now read the archive entries and verify that the timestamp, UID, and GID fields are
    // zero'd out.
    // NOTE(review): only the entry name is asserted below; the timestamp/UID/GID claim
    // above is not actually checked in this snippet.
    try (ArArchiveInputStream stream = new ArArchiveInputStream(
            new FileInputStream(filesystem.resolve(output).toFile()))) {
        ArArchiveEntry entry = stream.getNextArEntry();

        // Verify that the input names are relative paths from the output's parent dir.
        assertThat(entry.getName(), Matchers.equalTo(output.getParent().relativize(input).toString()));
    }
}

From source file:de.elomagic.carafile.client.CaraFileClient.java

/**
 * Downloads a file into an {@link OutputStream}.
 * <p>
 * Each chunk listed in the meta data is first fetched from a peer into its own
 * temporary file; once all chunks are present they are concatenated in meta-data
 * order into {@code out} while a SHA-1 digest is computed for validation. All
 * temporary chunk files are deleted before this method returns, even on failure.
 *
 * @param md {@link MetaData} of the file; must not be {@code null}.
 * @param out The output stream; must not be {@code null}. It's not recommended to use a buffered stream.
 * @throws IOException Thrown when unable to write file into the output stream, when no peer can be
 *         selected for a chunk, or when the SHA-1 validation failed.
 */
public void downloadFile(final MetaData md, final OutputStream out) throws IOException {
    if (md == null) {
        throw new IllegalArgumentException("Parameter 'md' must not be null!");
    }

    if (out == null) {
        throw new IllegalArgumentException("Parameter 'out' must not be null!");
    }

    // chunk id -> temp file holding that chunk's bytes
    Map<String, Path> downloadedChunks = new HashMap<>();
    Set<String> chunksToDownload = new HashSet<>();
    for (ChunkData chunkData : md.getChunks()) {
        chunksToDownload.add(chunkData.getId());
    }

    try {
        while (!chunksToDownload.isEmpty()) {
            // Ask the selector which peer should serve one of the remaining chunks.
            PeerChunk pc = peerChunkSelector.getNext(md, chunksToDownload);
            if (pc == null || pc.getPeerURI() == null) {
                throw new IOException("No peer found or selected for download");
            }

            // APPEND on a freshly created (empty) temp file effectively writes from the start.
            Path chunkFile = Files.createTempFile("fs_", ".tmp");
            try (OutputStream chunkOut = Files.newOutputStream(chunkFile, StandardOpenOption.APPEND)) {
                // NOTE(review): "downloadShunk" looks like a typo for downloadChunk —
                // defined elsewhere in this class; verify.
                downloadShunk(pc, md, chunkOut);

                // Only mark the chunk done after the download call returned without throwing.
                downloadedChunks.put(pc.getChunkId(), chunkFile);
                chunksToDownload.remove(pc.getChunkId());

                chunkOut.flush();
            } catch (Exception ex) {
                // Discard the partial chunk file; the finally block below cleans up the rest.
                Files.deleteIfExists(chunkFile);
                throw ex;
            }
        }

        MessageDigest messageDigest = DigestUtils.getSha1Digest();

        // Write chunk on correct order to file, digesting the bytes as they pass through.
        try (DigestOutputStream dos = new DigestOutputStream(out, messageDigest);
                BufferedOutputStream bos = new BufferedOutputStream(dos, md.getChunkSize())) {
            for (ChunkData chunk : md.getChunks()) {
                Path chunkPath = downloadedChunks.get(chunk.getId());
                Files.copy(chunkPath, bos);
            }
        }

        // The file id doubles as its expected SHA-1; reject the download on mismatch.
        String sha1 = Hex.encodeHexString(messageDigest.digest());
        if (!sha1.equalsIgnoreCase(md.getId())) {
            throw new IOException(
                    "SHA1 validation of file failed. Expected " + md.getId() + " but was " + sha1);
        }
    } finally {
        // Best-effort cleanup of all completed chunk temp files.
        for (Path path : downloadedChunks.values()) {
            try {
                Files.deleteIfExists(path);
            } catch (IOException ex) {
                LOG.error("Unable to delete chunk " + path.toString() + "; " + ex.getMessage(), ex);
            }
        }
    }
}

From source file:net.sf.jabref.gui.journals.ManageJournalsPanel.java

/**
 * Persists the journal abbreviation settings: writes the personal abbreviation
 * list to its file, records that file path and the external file list in the
 * preferences, and refreshes the journal abbreviation loader.
 */
private void storeSettings() {
    // Determine where the personal abbreviation file should be written: a newly
    // named file, or the pre-existing personal file.
    Path filePath = null;
    if (newFile.isSelected()) {
        if (!newNameTf.getText().isEmpty()) {
            filePath = Paths.get(newNameTf.getText());
        }
    } else {
        filePath = Paths.get(personalFile.getText());
    }

    if (filePath != null) {
        // TRUNCATE_EXISTING is required in addition to CREATE: with CREATE alone,
        // rewriting a shorter journal list would leave stale trailing bytes from the
        // previous, longer file contents.
        try (OutputStream stream = Files.newOutputStream(filePath, StandardOpenOption.CREATE,
                StandardOpenOption.TRUNCATE_EXISTING);
                OutputStreamWriter writer = new OutputStreamWriter(stream,
                        Globals.prefs.getDefaultEncoding())) {
            // One "name = abbreviation" entry per line.
            for (JournalEntry entry : tableModel.getJournals()) {
                writer.write(entry.getName());
                writer.write(" = ");
                writer.write(entry.getAbbreviation());
                writer.write(Globals.NEWLINE);
            }
        } catch (IOException e) {
            LOGGER.warn("Problem writing abbreviation file", e);
        }
        String filename = filePath.toString();
        if ("".equals(filename)) {
            filename = null;
        }
        // NOTE(review): filename can be null here (empty path) — confirm that
        // Globals.prefs.put tolerates null values.
        Globals.prefs.put(JabRefPreferences.PERSONAL_JOURNAL_LIST, filename);
    }

    // Store the list of external files set up, skipping empty entries:
    List<String> extFiles = new ArrayList<>();
    for (ExternalFileEntry efe : externals) {
        if (!"".equals(efe.getValue())) {
            extFiles.add(efe.getValue());
        }
    }
    Globals.prefs.putStringList(JabRefPreferences.EXTERNAL_JOURNAL_LISTS, extFiles);

    // Update journal abbreviation loader so the new lists take effect immediately.
    Globals.journalAbbreviationLoader.update(JournalAbbreviationPreferences.fromPreferences(Globals.prefs));
}

From source file:org.apache.nifi.controller.repository.VolatileContentRepository.java

/**
 * Exports the content of {@code claim} to {@code destination}.
 *
 * @param claim the content claim to export; {@code null} means "no content"
 * @param destination file to write to
 * @param append whether to append to the destination rather than replace it
 * @param offset number of leading content bytes to skip
 * @param length number of bytes to copy after the offset
 * @return the number of bytes written ({@code length}, or 0 for a null claim)
 * @throws IOException if the claim cannot be read or the destination written
 */
@Override
public long exportTo(final ContentClaim claim, final Path destination, final boolean append, final long offset,
        final long length) throws IOException {
    if (claim == null) {
        // No content: appending is a no-op; otherwise materialize an empty file.
        if (append) {
            return 0L;
        }
        Files.createFile(destination);
        return 0L;
    }

    // CREATE in both modes so export also works when the destination does not yet exist
    // (bare APPEND would throw NoSuchFileException); TRUNCATE_EXISTING in replace mode so
    // overwriting a longer existing file cannot leave stale trailing bytes (bare CREATE did).
    final StandardOpenOption[] openOptions = append
            ? new StandardOpenOption[] { StandardOpenOption.CREATE, StandardOpenOption.APPEND }
            : new StandardOpenOption[] { StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING };
    try (final InputStream in = read(claim);
            final OutputStream destinationStream = Files.newOutputStream(destination, openOptions)) {

        if (offset > 0) {
            StreamUtils.skip(in, offset);
        }

        StreamUtils.copy(in, destinationStream, length);
        return length;
    }
}

From source file:org.audiveris.omr.classifier.AbstractClassifier.java

/**
 * Store the norms based on training samples.
 * <p>
 * The means and standard deviations are each serialized via
 * {@code Nd4j.write} into their own entry under the given root.
 *
 * @param root path to root of file system
 * @throws Exception if something goes wrong during IO operations
 */
protected void storeNorms(Path root) throws Exception {
    // Serialize the means vector.
    final Path meansPath = root.resolve(MEANS_ENTRY_NAME);
    try (DataOutputStream meansOut = new DataOutputStream(
            new BufferedOutputStream(Files.newOutputStream(meansPath, CREATE)))) {
        Nd4j.write(norms.means, meansOut);
        meansOut.flush();
    }

    // Serialize the standard-deviations vector.
    final Path stdsPath = root.resolve(STDS_ENTRY_NAME);
    try (DataOutputStream stdsOut = new DataOutputStream(
            new BufferedOutputStream(Files.newOutputStream(stdsPath, CREATE)))) {
        Nd4j.write(norms.stds, stdsOut);
        stdsOut.flush();
    }
}

From source file:org.neo4j.io.fs.FileUtils.java

/**
 * Opens {@code path} for writing, creating the file if it does not exist.
 * <p>
 * NOTE(review): without TRUNCATE_EXISTING, {@code append == false} overwrites
 * in place from position 0 and leaves any trailing bytes of a longer existing
 * file intact — this behavior is preserved deliberately.
 *
 * @param path file to open
 * @param append when {@code true}, writes are positioned at end-of-file
 * @return an output stream over {@code path}
 * @throws IOException if the file cannot be opened
 */
public static OutputStream openAsOutputStream(Path path, boolean append) throws IOException {
    final OpenOption[] options = append
            ? new OpenOption[] { CREATE, WRITE, APPEND }
            : new OpenOption[] { CREATE, WRITE };
    return Files.newOutputStream(path, options);
}

From source file:com.themodernway.server.core.io.IO.java

/**
 * Opens (or creates) the given file and returns an output stream over it.
 *
 * @param path the target file; must not be {@code null}
 * @param options open options forwarded to {@link Files#newOutputStream}
 * @return an output stream writing to {@code path}
 * @throws IOException if the stream cannot be opened
 */
public static final OutputStream toOutputStream(final Path path, final OpenOption... options)
        throws IOException {
    final Path checked = CommonOps.requireNonNull(path);
    return Files.newOutputStream(checked, options);
}

From source file:org.elasticsearch.xpack.core.ssl.SSLConfigurationReloaderTests.java

/**
 * Tests the reloading of a keystore when there is an exception during reloading. An exception is
 * caused by truncating the keystore that is being monitored.
 */
public void testReloadingKeyStoreException() throws Exception {
    // Set up a watched keystore copied from the test fixtures.
    Path tempDir = createTempDir();
    Path keystorePath = tempDir.resolve("testnode.jks");
    Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"),
            keystorePath);
    MockSecureSettings secureSettings = new MockSecureSettings();
    secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode");
    Settings settings = Settings.builder().put("xpack.ssl.keystore.path", keystorePath)
            .setSecureSettings(secureSettings).put("path.home", createTempDir()).build();
    Environment env = randomBoolean() ? null : TestEnvironment.newEnvironment(settings);
    final SSLService sslService = new SSLService(settings, env);
    final SSLConfiguration config = sslService.sslConfiguration(Settings.EMPTY);
    // Anonymous reloader: the broken keystore must never trigger a reload.
    new SSLConfigurationReloader(settings, env, sslService, resourceWatcherService) {
        @Override
        void reloadSSLContext(SSLConfiguration configuration) {
            fail("reload should not be called! [keystore reload exception]");
        }
    };

    final SSLContext context = sslService.sslContextHolder(config).sslContext();

    // truncate the keystore: opening with TRUNCATE_EXISTING and immediately closing
    // leaves a zero-byte (invalid) keystore for the watcher to trip over.
    try (OutputStream out = Files.newOutputStream(keystorePath, StandardOpenOption.TRUNCATE_EXISTING)) {
        // intentionally empty — the open itself performs the truncation
    }

    // we intentionally don't wait here as we rely on concurrency to catch a failure
    assertThat(sslService.sslContextHolder(config).sslContext(), sameInstance(context));
}

From source file:codes.thischwa.c5c.UserObjectProxy.java

/**
 * Runs the configured EXIF remover over {@code tempPath}, writing the cleaned bytes
 * to a sibling file suffixed {@code _woExif}.
 *
 * @param tempPath path of the uploaded temporary file
 * @return the path of the EXIF-stripped copy when data was removed, otherwise
 *         {@code tempPath} itself (also on any error, or when no remover is configured)
 */
public static java.nio.file.Path removeExif(java.nio.file.Path tempPath) {
    if (exifRemover == null)
        return tempPath;
    try {
        String fileName = tempPath.toString();
        String ext = FilenameUtils.getExtension(fileName);

        java.nio.file.Path woExifPath = Paths.get(tempPath.toString() + "_woExif");
        // try-with-resources: the original code never closed either stream, leaking
        // file handles and risking an unflushed/incomplete output file.
        boolean removed;
        try (java.io.InputStream in = Files.newInputStream(tempPath);
                java.io.OutputStream out = Files.newOutputStream(woExifPath, StandardOpenOption.CREATE_NEW)) {
            removed = exifRemover.removeExif(in, out, ext);
        }
        logger.debug("potential exif data removed: {}", removed);
        return (removed) ? woExifPath : tempPath;
    } catch (IOException e) {
        logger.warn("Error while removing EXIF data.", e);
        return tempPath;
    }
}