Example usage for java.nio.file Files.write

List of usage examples for java.nio.file Files.write

Introduction

On this page you can find usage examples for java.nio.file Files.write.

Prototype

public static Path write(Path path, Iterable<? extends CharSequence> lines, OpenOption... options)
        throws IOException 

Document

Write lines of text to a file.
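
A minimal sketch of the overload above, assuming a writable working directory; the file name example-lines.txt and the class name FilesWriteSketch are arbitrary placeholders, not taken from the sources below:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.Arrays;
import java.util.List;

public class FilesWriteSketch {
    public static void main(String[] args) throws IOException {
        // Placeholder path; point this at any writable location.
        Path target = Paths.get("example-lines.txt");

        List<String> lines = Arrays.asList("first line", "second line");

        // This overload encodes each line as UTF-8 and terminates it with the
        // platform line separator. CREATE plus TRUNCATE_EXISTING matches the
        // default behavior when no OpenOptions are passed.
        Files.write(target, lines, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);

        // Read the file back to confirm what was written.
        for (String line : Files.readAllLines(target)) {
            System.out.println(line);
        }
    }
}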

Usage

From source file:org.fim.internal.hash.FileHasherPerformanceTest.java

private Path createFileWithSize(int fileCount, int fileSize) throws IOException {
    Path newFile = context.getRepositoryRootDir().resolve("file_" + fileCount);
    if (Files.exists(newFile)) {
        Files.delete(newFile);
    }

    if (fileSize == 0) {
        Files.createFile(newFile);
        return newFile;
    }

    try (ByteArrayOutputStream out = new ByteArrayOutputStream(fileSize)) {
        int contentSize = _1_KB / 4;
        int remaining = fileSize;
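        // Append generated chunks until fileSize bytes have been accumulated,
        // advancing the global sequence counter for each chunk.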
        for (; remaining > 0; globalSequenceCount++) {
            int size = min(contentSize, remaining);
            byte[] content = generateContent(globalSequenceCount, size);
            remaining -= size;
            out.write(content);
        }

        byte[] fileContent = out.toByteArray();
        assertThat(fileContent.length).isEqualTo(fileSize);
        Files.write(newFile, fileContent, CREATE);
    }

    return newFile;
}

From source file:com.netflix.genie.agent.cli.ResolveJobSpecCommand.java

@Override
public ExitCode run() {
    log.info("Resolving job specification");

    final ObjectMapper prettyJsonMapper = GenieObjectMapper.getMapper().copy() // Don't reconfigure the shared mapper
            .enable(SerializationFeature.INDENT_OUTPUT);

    final JobSpecification spec;
    final String jobId = resolveJobSpecCommandArguments.getSpecificationId();
    if (!StringUtils.isBlank(jobId)) {
        // Do a specification lookup if an id is given
        log.info("Looking up specification of job {}", jobId);
        try {
            spec = agentJobService.getJobSpecification(jobId);
        } catch (final JobSpecificationResolutionException e) {
            throw new RuntimeException("Failed to get spec: " + jobId, e);
        }

    } else {
        // Compose a job request from the command-line arguments
        final AgentJobRequest agentJobRequest;
        try {
            final ArgumentDelegates.JobRequestArguments jobArgs = resolveJobSpecCommandArguments
                    .getJobRequestArguments();
            agentJobRequest = jobRequestConverter.agentJobRequestArgsToDTO(jobArgs);
        } catch (final JobRequestConverter.ConversionException e) {
            throw new RuntimeException("Failed to construct job request from arguments", e);
        }

        // Print request
        if (!resolveJobSpecCommandArguments.isPrintRequestDisabled()) {
            try {
                System.out.println(prettyJsonMapper.writeValueAsString(agentJobRequest));
            } catch (final JsonProcessingException e) {
                throw new RuntimeException("Failed to map request to JSON", e);
            }
        }

        // Resolve via service
        try {
            spec = agentJobService.resolveJobSpecificationDryRun(agentJobRequest);
        } catch (final JobSpecificationResolutionException e) {
            throw new RuntimeException("Failed to resolve job specification", e);
        }
    }

    // Translate response to JSON
    final String specJsonString;
    try {
        specJsonString = prettyJsonMapper.writeValueAsString(spec);
    } catch (final JsonProcessingException e) {
        throw new RuntimeException("Failed to map specification to JSON", e);
    }

    // Print specification
    System.out.println(specJsonString);

    // Write specification to file
    final File outputFile = resolveJobSpecCommandArguments.getOutputFile();
    if (outputFile != null) {
        try {
            Files.write(outputFile.toPath(), specJsonString.getBytes(StandardCharsets.UTF_8),
                    StandardOpenOption.CREATE_NEW);
        } catch (final IOException e) {
            throw new RuntimeException("Failed to write request to: " + outputFile.getAbsolutePath(), e);
        }
    }

    return ExitCode.SUCCESS;
}

From source file:com.evolveum.midpoint.provisioning.impl.manual.TestSemiManual.java

private void appendToCsv(String[] data) throws IOException {
    String line = formatCsvLine(data) + "\n";
    Files.write(Paths.get(CSV_TARGET_FILE.getPath()), line.getBytes(), StandardOpenOption.APPEND);
}

From source file:net.di2e.ecdr.describe.commands.GenerateDescribeCommand.java

protected void writeToFile(String sourceId, String xml) {
    try {
        String filename = sourceId + "-describe-" + (System.currentTimeMillis() / 1000) + ".xml";
        Files.write(Paths.get(DESCRIBE_DIR, filename), xml.getBytes(), StandardOpenOption.CREATE);
        console.println("New describe file written to DDF_HOME/" + DESCRIBE_DIR + ": " + filename);
    } catch (IOException e) {
        LOGGER.error("Could not write describe file for source {}", sourceId, e);
    }
}

From source file:org.epics.archiverappliance.etl.ZeroByteFilesTest.java

public void testZeroByteFileInDest() throws Exception {
    String pvName = ConfigServiceForTests.ARCH_UNIT_TEST_PVNAME_PREFIX + "ETL_testZeroDest";
    // Create a zero-byte file in the ETL destination
    VoidFunction zeroByteGenerator = () -> {
        Path zeroDestPath = Paths.get(etlDest.getRootFolder(), pvNameToKeyConverter.convertPVNameToKey(pvName)
                + currentYear + PlainPBStoragePlugin.PB_EXTENSION);
        logger.info("Creating zero byte file " + zeroDestPath);
        Files.write(zeroDestPath, new byte[0], StandardOpenOption.CREATE);
    };
    runETLAndValidate(pvName, zeroByteGenerator);
}

From source file:ch.puzzle.itc.mobiliar.business.utils.SecureFileLoaderTest.java

@Test
public void testLoadFileFromFileSystem() throws IOException, IllegalAccessException {
    String s = "Hello" + System.lineSeparator() + "World" + System.lineSeparator() + "How are you?";
    Files.write(f, Arrays.asList(s.split(System.lineSeparator())), StandardCharsets.UTF_8);
    String result = fileLoader.loadFileFromFileSystem(dir.toString(), f.toString());

    Assert.assertEquals(s, result);

}

From source file:com.evolveum.midpoint.provisioning.impl.manual.TestSemiManual.java

private void replaceInCsv(String[] data) throws IOException {
    List<String> lines = Files.readAllLines(Paths.get(CSV_TARGET_FILE.getPath()));
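    // Replace the line whose first (quoted) column matches the given record identifier.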
    for (int i = 0; i < lines.size(); i++) {
        String line = lines.get(i);
        String[] cols = line.split(",");
        if (cols[0].matches("\"" + data[0] + "\"")) {
            lines.set(i, formatCsvLine(data));
        }
    }
    Files.write(Paths.get(CSV_TARGET_FILE.getPath()), lines, StandardOpenOption.WRITE);
}

From source file:com.evolveum.midpoint.model.intest.manual.CsvBackingStore.java

protected void appendToCsv(String[] data) throws IOException {
    String line = formatCsvLine(data);
    Files.write(Paths.get(CSV_TARGET_FILE.getPath()), line.getBytes(), StandardOpenOption.APPEND);
}

From source file:com.ikanow.aleph2.security.utils.LdifExportUtil.java

public void exportToLdif(String outPath) {
    // copy Template
    Path pOutPath = Paths.get(outPath);

    try {
        Files.copy(this.getClass().getResourceAsStream("aleph2_template.ldif"), pOutPath,
                StandardCopyOption.REPLACE_EXISTING);
        String userEntries = createUserEntries();

        Files.write(pOutPath, userEntries.getBytes(), StandardOpenOption.APPEND);
    } catch (Exception e) {
        logger.error(e);
    }
}

From source file:org.apache.streams.plugins.StreamsScalaSourceGenerator.java

private void writeFile(String pojoFile, String pojoScala) {
    try {
        File path = new File(pojoFile);
        File dir = path.getParentFile();
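        // Make sure the parent directories exist before writing the generated file.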
        if (!dir.exists()) {
            dir.mkdirs();
        }
        Files.write(Paths.get(pojoFile), pojoScala.getBytes(), StandardOpenOption.CREATE_NEW);
    } catch (Exception ex) {
        LOGGER.error("Write Exception: {}", ex);
    }
}