Example usage for java.nio.file StandardOpenOption APPEND

List of usage examples for java.nio.file StandardOpenOption APPEND

Introduction

In this page you can find the example usage for java.nio.file StandardOpenOption APPEND.

Prototype

StandardOpenOption APPEND

To view the source code for java.nio.file StandardOpenOption APPEND, click the source link below.

Click Source Link

Document

If the file is opened for WRITE access, then bytes will be written to the end of the file rather than the beginning.

Usage

From source file:ai.susi.mind.SusiIdentity.java

/**
 * Adds a cognition to this identity and appends it to the memory dump file.
 * Cognitions that fall outside the awareness threshold are moved from
 * short-term to long-term memory.
 *
 * @param cognition the cognition to learn
 * @return this identity, for chaining
 */
public SusiIdentity add(SusiCognition cognition) {
    this.short_term_memory.learn(cognition);
    // Trim short-term memory; whatever falls out is remembered long-term.
    // TODO add a rule to memorize only the most important ones
    for (SusiCognition forgotten : this.short_term_memory.limitAwareness(this.attention)) {
        this.long_term_memory.learn(forgotten);
    }
    try {
        byte[] record = UTF8.getBytes(cognition.getJSON().toString(0) + "\n");
        // APPEND + CREATE: create the dump file on first use, append afterwards.
        Files.write(this.memorydump.toPath(), record,
                StandardOpenOption.APPEND, StandardOpenOption.CREATE);
    } catch (JSONException | IOException e) {
        // Best-effort persistence: failures are reported but do not break the add.
        e.printStackTrace();
    }
    return this;
}

From source file:com.acmutv.ontoqa.tool.io.IOManager.java

/**
 * Appends string on a resource./*from   www  .j  a va 2s. c  o m*/
 * @param resource the resource to write on.
 * @param string the string to write.
 * @throws IOException when resource cannot be written.
 */
public static void appendResource(String resource, String string) throws IOException {
    Path path = FileSystems.getDefault().getPath(resource).toAbsolutePath();
    Files.write(path, string.getBytes(), StandardOpenOption.APPEND);
}

From source file:org.wso2.carbon.user.mgt.recorder.DefaultUserDeletionEventRecorder.java

/**
 * Writes the given content as a single line to the file at {@code path}.
 * The file is created if it does not exist; content is appended at the end.
 *
 * @param path    path of the file to write to.
 * @param content line of content to append.
 * @throws RecorderException if the content cannot be written.
 */
private void writeToCustomFile(String path, String content) throws RecorderException {

    // Create the file if it does not exist; open with write permission and append to the end.
    // The BufferedWriter is itself a try-with-resources resource so its buffered
    // content is reliably flushed and the writer closed on every exit path
    // (the original only closed the underlying stream, leaving the writer's
    // flush dependent on an explicit call that an exception could skip).
    // Explicit UTF-8: the no-charset OutputStreamWriter constructor uses the
    // platform default charset, which is not portable.
    try (OutputStream outputStream = Files.newOutputStream(Paths.get(path), StandardOpenOption.CREATE,
            StandardOpenOption.WRITE, StandardOpenOption.APPEND);
            BufferedWriter bufferedWriter = new BufferedWriter(
                    new OutputStreamWriter(outputStream, java.nio.charset.StandardCharsets.UTF_8))) {
        bufferedWriter.write(content);
        bufferedWriter.newLine();
    } catch (IOException e) {
        throw new RecorderException("Error while writing content to the file.", e);
    }
}

From source file:com.reactive.hzdfs.io.MemoryMappedChunkHandler.java

/**
 * Writes the next chunk of an incoming file into the target file through a
 * memory-mapped buffer. On the first chunk the target file is initialised and
 * a READ_WRITE mapping sized to the expected total file size is created;
 * subsequent chunks are copied into that mapping.
 *
 * @param chunk the next chunk of file content to persist
 * @throws IOException if the accumulated byte count exceeds the expected file size
 */
@Override
public void writeNext(FileChunk chunk) throws IOException {
    log.debug("[writeNext] " + chunk);
    if (file == null) {
        // First chunk: create the target file and open a channel for writing.
        initWriteFile(chunk);
        oStream = FileChannel.open(file.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE,
                StandardOpenOption.APPEND);

        /*
         * From javadocs:
         * "The behavior of this method when the requested region is not completely contained within this channel's file is unspecified. 
         * Whether changes made to the content or size of the underlying file, by this program or another, are propagated to the buffer 
         * is unspecified. The rate at which changes to the buffer are propagated to the file is unspecified."
         * 
         * Initially this is a 0 byte file. So how do we write to a new file??
         */
        log.debug("mapping byte buffer for write");
        // Map the full expected file size up front even though the file is
        // currently empty (see the javadoc caveat quoted above).
        mapBuff = oStream.map(MapMode.READ_WRITE, 0, chunk.getFileSize());

        if (log.isDebugEnabled()) {
            debugInitialParams();
            log.debug("Writing to target file- " + file + ". Expecting chunks to receive- " + chunk.getSize());
        }
    }

    doAttribCheck(chunk);

    // Remap a fresh window past the consumed region if the mapping is exhausted.
    //this is probably unreachable
    if (!mapBuff.hasRemaining()) {
        position += mapBuff.position();
        unmap(mapBuff);
        mapBuff = oStream.map(MapMode.READ_WRITE, position, chunk.getFileSize());
    }

    mapBuff.put(chunk.getChunk());
    fileSize += chunk.getChunk().length;

    // Guard: refuse to accept more bytes than the sender declared.
    if (fileSize > chunk.getFileSize())
        throw new IOException(
                "File size [" + fileSize + "] greater than expected size [" + chunk.getFileSize() + "]");

}

From source file:org.apache.asterix.external.util.FeedLogManager.java

/**
 * Opens the feed log manager: replays the progress log to rebuild the set of
 * completed splits, then opens appending writers for the progress, error and
 * bad-record logs.
 *
 * @throws IOException if any log file cannot be read or opened
 */
public synchronized void open() throws IOException {
    Path progressLogPath = Paths
            .get(dir.toAbsolutePath().toString() + File.separator + PROGRESS_LOG_FILE_NAME);

    // Replay the progress log: every END-prefixed entry marks a split as completed.
    // try-with-resources guarantees the reader is closed even if readLine throws;
    // the original leaked the reader on any read error.
    try (BufferedReader reader = Files.newBufferedReader(progressLogPath)) {
        String log = reader.readLine();
        while (log != null) {
            if (log.startsWith(END_PREFIX)) {
                completed.add(getSplitId(log));
            }
            log = reader.readLine();
        }
    }

    // NOTE(review): APPEND without CREATE assumes these log files already
    // exist -- confirm callers create them before open() is invoked.
    progressLogger = Files.newBufferedWriter(progressLogPath, StandardCharsets.UTF_8,
            StandardOpenOption.APPEND);
    errorLogger = Files.newBufferedWriter(
            Paths.get(dir.toAbsolutePath().toString() + File.separator + ERROR_LOG_FILE_NAME),
            StandardCharsets.UTF_8, StandardOpenOption.APPEND);
    recordLogger = Files.newBufferedWriter(
            Paths.get(dir.toAbsolutePath().toString() + File.separator + BAD_RECORDS_FILE_NAME),
            StandardCharsets.UTF_8, StandardOpenOption.APPEND);
}

From source file:org.apache.flink.client.CliFrontendAddressConfigurationTest.java

/**
 * Verifies that the CliFrontend picks up the .yarn-properties file from the
 * location configured in flink-conf.yaml.
 */
@Test
public void testYarnConfig() {
    try {
        File testDir = folder.newFolder();
        String userName = System.getProperty("user.name");

        // Stage a flink-conf.yaml in the test directory, pointing the yarn
        // properties file location at that same directory.
        File referenceConf = new File(
                CliFrontendRunTest.class.getResource("/testconfigwithyarn/flink-conf.yaml").getFile());
        File stagedConf = new File(testDir, "flink-conf.yaml");
        org.apache.commons.io.FileUtils.copyFile(referenceConf, stagedConf);
        Files.write(stagedConf.toPath(), ("\nyarn.properties-file.location: " + testDir).getBytes(),
                StandardOpenOption.APPEND);

        // Stage the per-user .yarn-properties-<username> file.
        File referenceProps = new File(
                CliFrontendRunTest.class.getResource("/testconfigwithyarn/.yarn-properties").getFile());
        File stagedProps = new File(testDir, ".yarn-properties-" + userName);
        org.apache.commons.io.FileUtils.copyFile(referenceProps, stagedProps);

        // Boot the CLI frontend against the staged configuration and check
        // that the job manager address comes from the yarn properties.
        CliFrontend frontend = new CliFrontend(testDir.getAbsolutePath());
        CommandLineOptions options = mock(CommandLineOptions.class);
        frontend.updateConfig(options);
        Configuration config = frontend.getConfiguration();

        checkJobManagerAddress(config, CliFrontendTestUtils.TEST_YARN_JOB_MANAGER_ADDRESS,
                CliFrontendTestUtils.TEST_YARN_JOB_MANAGER_PORT);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}

From source file:com.surevine.gateway.scm.git.jgit.TestUtility.java

/**
 * Creates a throwaway local git repository containing three committed text
 * files, configured with a fake origin remote, and returns its descriptor.
 *
 * @return a LocalRepoBean describing the newly created repository
 * @throws Exception if the repository cannot be created
 */
public static LocalRepoBean createTestRepo() throws Exception {
    final String projectKey = "test_" + UUID.randomUUID().toString();
    final String repoSlug = "testRepo";
    final Path repoPath = Paths.get(PropertyUtil.getGitDir(), "local_scm", projectKey, repoSlug);
    Files.createDirectories(repoPath);

    // Initialise a non-bare repository whose origin points at a fake URL.
    final String remoteURL = "ssh://fake_url";
    final Repository repo = new FileRepository(repoPath.resolve(".git").toFile());
    repo.create();
    final StoredConfig config = repo.getConfig();
    config.setString("remote", "origin", "url", remoteURL);
    config.save();

    final LocalRepoBean repoBean = new LocalRepoBean();
    repoBean.setProjectKey(projectKey);
    repoBean.setSlug(repoSlug);
    repoBean.setLocalBare(false);
    repoBean.setSourcePartner("partner");

    // Commit three small files so the repository has history on master.
    final Git git = new Git(repo);
    for (int fileIndex = 0; fileIndex < 3; fileIndex++) {
        final String fileName = "newfile" + fileIndex + ".txt";
        Files.write(repoPath.resolve(fileName), Arrays.asList("Hello World"), StandardCharsets.UTF_8,
                StandardOpenOption.CREATE, StandardOpenOption.APPEND);
        git.add().addFilepattern(fileName).call();
        git.commit().setMessage("Added " + fileName).call();
    }
    git.checkout().setName("master").call();

    repo.close();
    return repoBean;
}

From source file:io.apiman.common.es.util.ApimanEmbeddedElastic.java

/**
 * Extracts the embedded Elasticsearch server's process id via reflection and
 * appends it to the PID file at {@code pidPath}, creating the file and its
 * parent directories if necessary.
 *
 * @throws IOException if the PID file or its parent directories cannot be written
 */
private void writeProcessId() throws IOException {
    try {
        // Create parent directory (i.e. ~/.cache/apiman/es-pid-{identifier})
        Files.createDirectories(pidPath.getParent());

        // Get the elasticServer instance variable
        Field elasticServerField = elastic.getClass().getDeclaredField("elasticServer");
        elasticServerField.setAccessible(true);
        Object elasticServerInstance = elasticServerField.get(elastic); // ElasticServer package-scoped so we can't get the real type.

        // Get the process ID (pid) long field from ElasticServer
        // NOTE(review): if "pid" really is a long field, (int) on the boxed
        // Long throws ClassCastException -- confirm the field type; a long
        // cast or pidField.getInt(...) would be safer.
        Field pidField = elasticServerInstance.getClass().getDeclaredField("pid");
        pidField.setAccessible(true);
        pid = (int) pidField.get(elasticServerInstance); // Get the pid

        // Write to the PID file; CREATE + APPEND creates it on first use and
        // appends on subsequent runs.
        Files.write(pidPath, String.valueOf(pid).getBytes(), StandardOpenOption.CREATE,
                StandardOpenOption.APPEND);
    } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) {
        // Reflection failures are programming errors here, not recoverable I/O conditions.
        throw new RuntimeException(e);
    }

}

From source file:org.ulyssis.ipp.reader.Reader.java

/**
 * Create a new reader and connect to Redis.
 *
 * options are passed in, rather than
 * accessed through a singleton or such, to improve testability
 * and modularity, and to prevent hidden dependencies and
 * eventual threading issues.
 *
 * @param options
 *           The command line options to use for this reader.
 */
public Reader(ReaderOptions options) {
    this.options = options;
    this.readerConfig = Config.getCurrentConfig().getReader(options.getId());
    this.llrpReader = new LLRPReader(this::messageReceived, this::errorOccurred);

    // A simulated reader drives itself from a scheduled executor; other
    // reader types leave it null.
    if (readerConfig.getType() == ReaderConfig.Type.SIMULATOR) {
        executorService = Executors.newSingleThreadScheduledExecutor();
    } else {
        executorService = null;
    }

    if (options.getNoRedis()) {
        LOG.info("Not using Redis, setting initial update count to 0.");
        this.updateCount = 0L;
        this.jedis = null;
    } else {
        // Resume the update count from the length of the Redis "updates" list.
        this.jedis = JedisHelper.get(readerConfig.getURI());
        try {
            this.updateCount = jedis.llen("updates");
        } catch (JedisConnectionException e) {
            LOG.error("Couldn't connect to Jedis when getting update count. Setting 0 instead.", e);
            this.updateCount = 0L; // TODO: Is 0 appropriate?
        }
    }
    // Wire up the status reporting, command processing and update channels.
    String statusChannel = Config.getCurrentConfig().getStatusChannel();
    this.statusReporter = new StatusReporter(readerConfig.getURI(), statusChannel);
    String controlChannel = Config.getCurrentConfig().getControlChannel();
    this.commandProcessor = new CommandProcessor(readerConfig.getURI(), controlChannel, statusReporter);
    commandProcessor.addHandler(new PingHandler());
    this.updateChannel = JedisHelper.dbLocalChannel(Config.getCurrentConfig().getUpdateChannel(),
            readerConfig.getURI());

    // Optionally log raw reads to a replay file. APPEND + CREATE: create the
    // file on first run, append across restarts. Failure to open is logged
    // but non-fatal (replayChannel simply stays absent).
    options.getReplayFile().ifPresent(replayFile -> {
        try {
            LOG.info("Opening replay file: {}", replayFile);
            ByteChannel channel = Files.newByteChannel(replayFile, StandardOpenOption.APPEND,
                    StandardOpenOption.CREATE);
            this.replayChannel = Optional.of(channel);
        } catch (IOException e) {
            LOG.error("Couldn't open channel for logging to replay file: {}", replayFile, e);
        }
    });

    this.lastUpdateForTag = new HashMap<>();
}

From source file:com.evolveum.midpoint.provisioning.impl.manual.TestSemiManual.java

/**
 * Appends one CSV-formatted row, terminated by a newline, to the target CSV file.
 * NOTE(review): APPEND without CREATE assumes the target file already exists -- confirm.
 *
 * @param data the column values of the row to append
 * @throws IOException if the line cannot be written
 */
private void appendToCsv(String[] data) throws IOException {
    String line = formatCsvLine(data) + "\n";
    // Encode explicitly as UTF-8; the charset-less getBytes() used the
    // platform default charset, which is not portable across JVMs.
    Files.write(Paths.get(CSV_TARGET_FILE.getPath()), line.getBytes(java.nio.charset.StandardCharsets.UTF_8),
            StandardOpenOption.APPEND);
}