Example usage for java.io RandomAccessFile getFD

List of usage examples for java.io RandomAccessFile getFD

Introduction

On this page you can find example usages of java.io.RandomAccessFile#getFD().

Prototype

public final FileDescriptor getFD() throws IOException 

Source Link

Document

Returns the opaque file descriptor object associated with this stream.

Usage

From source file:org.apache.sshd.server.filesystem.NativeSshFile.java

/**
 * Create input stream for reading./*from  w  ww .j  av a2 s  . c  om*/
 */
public InputStream createInputStream(final long offset) throws IOException {

    // permission check
    if (!isReadable()) {
        throw new IOException("No read permission : " + file.getName());
    }

    // move to the appropriate offset and create input stream
    final RandomAccessFile raf = new RandomAccessFile(file, "r");
    raf.seek(offset);

    // The IBM jre needs to have both the stream and the random access file
    // objects closed to actually close the file
    return new FileInputStream(raf.getFD()) {
        public void close() throws IOException {
            super.close();
            raf.close();
        }
    };
}

From source file:org.apache.lucene.store.FSDirectory.java

/**
 * Forces the given file's contents to stable storage, retrying up to five
 * times with a 5 ms pause between attempts.
 *
 * @param fullFile the file to sync
 * @throws IOException the first sync failure, if all five attempts fail
 */
protected void fsync(File fullFile) throws IOException {
    IOException firstFailure = null;
    for (int attempt = 0; attempt < 5; attempt++) {
        try {
            RandomAccessFile channel = new RandomAccessFile(fullFile, "rw");
            try {
                channel.getFD().sync();
            } finally {
                channel.close();
            }
            // Sync succeeded; nothing left to do.
            return;
        } catch (IOException ioe) {
            // Remember only the first failure so the most relevant
            // exception is the one eventually thrown.
            if (firstFailure == null) {
                firstFailure = ioe;
            }
            try {
                // Pause 5 msec
                Thread.sleep(5);
            } catch (InterruptedException ie) {
                throw new ThreadInterruptedException(ie);
            }
        }
    }
    // All attempts failed; throw the original exception.
    throw firstFailure;
}

From source file:org.apache.sshd.server.filesystem.NativeSshFile.java

/**
 * Create output stream for writing./*from   w w w . j  a  va  2 s.c o m*/
 */
public OutputStream createOutputStream(final long offset) throws IOException {

    // permission check
    if (!isWritable()) {
        throw new IOException("No write permission : " + file.getName());
    }

    // create output stream
    final RandomAccessFile raf = new RandomAccessFile(file, "rw");
    raf.setLength(offset);
    raf.seek(offset);

    // The IBM jre needs to have both the stream and the random access file
    // objects closed to actually close the file
    return new FileOutputStream(raf.getFD()) {
        public void close() throws IOException {
            super.close();
            raf.close();
        }
    };
}

From source file:net.gleamynode.oil.impl.wal.store.FileLogStore.java

/**
 * Opens the log store: ensures the parent directory exists, creates the
 * class catalog (unless an external one is used), and wires up a reader and
 * a writer over two independent descriptors of the same log file.
 * On partial failure, everything constructed so far is torn down before the
 * wrapping OilException propagates.
 *
 * @throws IllegalStateException if the store is already open
 * @throws OilException if the parent path cannot be resolved or the log
 *         file cannot be opened
 */
public void open() {
    if (isOpen()) {
        throw new IllegalStateException();
    }

    parseProperties();

    try {
        File parentDir = file.getCanonicalFile().getParentFile();

        // Make sure the directory that will hold the log file exists.
        if (!parentDir.exists()) {
            parentDir.mkdirs();
        }
    } catch (IOException e) {
        throw new OilException("failed to get parent directory path.", e);
    }

    RandomAccessFile raf = null;

    // Create an internal class catalog unless the caller supplied one.
    if (!useExternalCatalog) {
        catalog = new ClassCatalog(file.getPath() + ".cat");
    }

    boolean done = false;

    try {
        // Reader: its own descriptor, positioned at the start of the log.
        raf = new RandomAccessFile(file, "rw");
        raf.seek(0L);
        reader = new FileLogReader(catalog, raf, new FileInputStream(raf.getFD()), maxItemSize);
        // Writer: a second, independent descriptor positioned at the end so
        // appends do not disturb the reader's file position.
        raf = new RandomAccessFile(file, "rw");
        raf.seek(raf.length());
        writer = new FileLogWriter(catalog, new FileOutputStream(raf.getFD()), maxItemSize);

        flusher = new Flusher();
        flusher.start();
        done = true;
    } catch (IOException e) {
        throw new OilException(e);
    } finally {
        // Partial-failure cleanup: close whatever was successfully built,
        // swallowing close errors so the original exception propagates.
        // NOTE(review): if the second RandomAccessFile is opened but the
        // writer constructor throws, that descriptor is not closed here —
        // TODO confirm whether FileLogWriter/FileOutputStream owns it.
        if (!done) {
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException e) {
                    // best-effort close during cleanup
                }

                reader = null;
            }

            if (writer != null) {
                try {
                    writer.close();
                } catch (IOException e) {
                    // best-effort close during cleanup
                }

                writer = null;
            }

            // Only close the catalog we created ourselves.
            if (!useExternalCatalog) {
                catalog.close();
            }
        }
    }
}

From source file:net.timewalker.ffmq4.storage.data.impl.journal.BlockBasedDataStoreJournal.java

/**
 * Flushes the given store file to stable storage using the configured
 * synchronization strategy.
 *
 * @param storeFile the open store file to synchronize
 * @throws JournalException if the sync method is unsupported or the sync
 *         itself fails (the underlying IOException is logged)
 */
private void syncStoreFile(RandomAccessFile storeFile) throws JournalException {
    try {
        switch (storageSyncMethod) {
        case StorageSyncMethod.FD_SYNC:
            // Force file content and metadata via the file descriptor.
            storeFile.getFD().sync();
            break;
        case StorageSyncMethod.CHANNEL_FORCE_NO_META:
            // force(false): sync content only, not metadata (cheaper).
            storeFile.getChannel().force(false);
            break;
        default:
            throw new JournalException("Unsupported sync method : " + storageSyncMethod);
        }
    } catch (IOException e) {
        log.error("[" + baseName + "] Could not sync store file", e);
        // NOTE(review): the IOException cause is dropped here (only logged);
        // consider chaining it if JournalException has a (String, Throwable)
        // constructor — TODO confirm.
        throw new JournalException("Could not sync store file");
    }
}

From source file:org.apache.hadoop.hdfs.tools.offlineImageViewer.PBImageTextWriter.java

/**
 * Visits an FSImage file, loading its string table and then emitting the
 * text representation of its directory/inode sections.
 *
 * @param file the FSImage file opened for random access
 * @throws IOException if the file is not a recognized FSImage or a section
 *         cannot be read
 */
public void visit(RandomAccessFile file) throws IOException {
    Configuration conf = new Configuration();
    if (!FSImageUtil.checkFileFormat(file)) {
        throw new IOException("Unrecognized FSImage");
    }

    FileSummary summary = FSImageUtil.loadSummary(file);

    try (FileInputStream fin = new FileInputStream(file.getFD())) {
        InputStream is;
        ArrayList<FileSummary.Section> sections = Lists.newArrayList(summary.getSectionsList());
        // Sort sections by their SectionName ordinal; sections with an
        // unrecognized name sort first.
        Collections.sort(sections, new Comparator<FileSummary.Section>() {
            @Override
            public int compare(FsImageProto.FileSummary.Section s1, FsImageProto.FileSummary.Section s2) {
                FSImageFormatProtobuf.SectionName n1 = FSImageFormatProtobuf.SectionName
                        .fromString(s1.getName());
                FSImageFormatProtobuf.SectionName n2 = FSImageFormatProtobuf.SectionName
                        .fromString(s2.getName());
                if (n1 == null) {
                    return n2 == null ? 0 : -1;
                } else if (n2 == null) {
                    // Symmetric counterpart of the branch above: a known name
                    // sorts AFTER an unknown one, so this must be +1, not -1
                    // (returning -1 both ways violates the Comparator
                    // contract and can make sort() throw).
                    return 1;
                } else {
                    return n1.ordinal() - n2.ordinal();
                }
            }
        });

        // First pass: locate and load the string table needed to decode
        // names in the remaining sections.
        for (FileSummary.Section section : sections) {
            fin.getChannel().position(section.getOffset());
            is = FSImageUtil.wrapInputStreamForCompression(conf, summary.getCodec(),
                    new BufferedInputStream(new LimitInputStream(fin, section.getLength())));
            switch (SectionName.fromString(section.getName())) {
            case STRING_TABLE:
                stringTable = FSImageLoader.loadStringTable(is);
                break;
            default:
                break;
            }
        }

        loadDirectories(fin, sections, summary, conf);
        loadINodeDirSection(fin, sections, summary, conf);
        metadataMap.sync();
        output(conf, summary, fin, sections);
    }
}

From source file:org.apache.flume.channel.file.TestFileChannelRestart.java

/**
 * Verifies that the channel recovers (or restores the backup checkpoint)
 * after the checkpoint metadata file has been truncated to zero length.
 *
 * @param backup whether dual (backup) checkpoints are enabled
 */
private void doTestTruncatedCheckpointMeta(boolean backup) throws Exception {
    Map<String, String> overrides = Maps.newHashMap();
    overrides.put(FileChannelConfiguration.USE_DUAL_CHECKPOINTS, String.valueOf(backup));
    channel = createFileChannel(overrides);
    channel.start();
    Assert.assertTrue(channel.isOpen());
    Set<String> in = putEvents(channel, "restart", 10, 100);
    Assert.assertEquals(100, in.size());
    forceCheckpoint(channel);
    if (backup) {
        // Give the backup checkpoint time to be written.
        Thread.sleep(2000);
    }
    channel.stop();
    File checkpoint = new File(checkpointDir, "checkpoint");
    // Truncate the checkpoint metadata; try-with-resources guarantees the
    // file is closed even if setLength()/sync() throws.
    try (RandomAccessFile writer = new RandomAccessFile(Serialization.getMetaDataFile(checkpoint), "rw")) {
        writer.setLength(0);
        writer.getFD().sync();
    }
    channel = createFileChannel(overrides);
    channel.start();
    Assert.assertTrue(channel.isOpen());
    Assert.assertTrue(!backup || channel.checkpointBackupRestored());
    Set<String> out = consumeChannel(channel);
    compareInputAndOut(in, out);
}

From source file:org.apache.flume.channel.file.TestFileChannelRestart.java

/**
 * Verifies that the channel recovers (or restores the backup checkpoint)
 * after the checkpoint metadata file has been corrupted with random bytes.
 *
 * @param backup whether dual (backup) checkpoints are enabled
 */
private void doTestCorruptCheckpointMeta(boolean backup) throws Exception {
    Map<String, String> overrides = Maps.newHashMap();
    overrides.put(FileChannelConfiguration.USE_DUAL_CHECKPOINTS, String.valueOf(backup));
    channel = createFileChannel(overrides);
    channel.start();
    Assert.assertTrue(channel.isOpen());
    Set<String> in = putEvents(channel, "restart", 10, 100);
    Assert.assertEquals(100, in.size());
    forceCheckpoint(channel);
    if (backup) {
        // Give the backup checkpoint time to be written.
        Thread.sleep(2000);
    }
    channel.stop();
    File checkpoint = new File(checkpointDir, "checkpoint");
    // Overwrite part of the metadata with a random long; try-with-resources
    // guarantees the file is closed even if a write or sync throws.
    try (RandomAccessFile writer = new RandomAccessFile(Serialization.getMetaDataFile(checkpoint), "rw")) {
        writer.seek(10);
        writer.writeLong(new Random().nextLong());
        writer.getFD().sync();
    }
    channel = createFileChannel(overrides);
    channel.start();
    Assert.assertTrue(channel.isOpen());
    Assert.assertTrue(!backup || channel.checkpointBackupRestored());
    Set<String> out = consumeChannel(channel);
    compareInputAndOut(in, out);
}

From source file:org.apache.flume.channel.file.TestFileChannelRestart.java

/**
 * Verifies that the channel recovers (or restores the backup checkpoint)
 * after the checkpoint's version field has been overwritten with an
 * unsupported value.
 *
 * @param backup whether dual (backup) checkpoints are enabled
 */
private void doTestBadCheckpointVersion(boolean backup) throws Exception {
    Map<String, String> overrides = Maps.newHashMap();
    overrides.put(FileChannelConfiguration.USE_DUAL_CHECKPOINTS, String.valueOf(backup));
    channel = createFileChannel(overrides);
    channel.start();
    Assert.assertTrue(channel.isOpen());
    Set<String> in = putEvents(channel, "restart", 10, 100);
    Assert.assertEquals(100, in.size());
    forceCheckpoint(channel);
    if (backup) {
        // Give the backup checkpoint time to be written.
        Thread.sleep(2000);
    }
    channel.stop();
    File checkpoint = new File(checkpointDir, "checkpoint");
    // Overwrite the version slot with a bogus value; try-with-resources
    // guarantees the file is closed even if a write or sync throws.
    try (RandomAccessFile writer = new RandomAccessFile(checkpoint, "rw")) {
        writer.seek(EventQueueBackingStoreFile.INDEX_VERSION * Serialization.SIZE_OF_LONG);
        writer.writeLong(2L);
        writer.getFD().sync();
    }
    channel = createFileChannel(overrides);
    channel.start();
    Assert.assertTrue(channel.isOpen());
    Assert.assertTrue(!backup || channel.checkpointBackupRestored());
    Set<String> out = consumeChannel(channel);
    compareInputAndOut(in, out);
}

From source file:org.apache.flume.channel.file.TestFileChannelRestart.java

/**
 * Exercises the fast-replay path: optionally corrupts the checkpoint
 * metadata, restarts the channel, and asserts that fast replay happened
 * exactly when it was both enabled and needed.
 *
 * @param shouldCorruptCheckpoint whether to corrupt the checkpoint metadata
 * @param useFastReplay whether fast replay is enabled in the configuration
 */
private void testFastReplay(boolean shouldCorruptCheckpoint, boolean useFastReplay) throws Exception {
    Map<String, String> overrides = Maps.newHashMap();
    overrides.put(FileChannelConfiguration.USE_FAST_REPLAY, String.valueOf(useFastReplay));
    channel = createFileChannel(overrides);
    channel.start();
    Assert.assertTrue(channel.isOpen());
    Set<String> in = putEvents(channel, "restart", 10, 100);
    Assert.assertEquals(100, in.size());
    forceCheckpoint(channel);
    channel.stop();
    if (shouldCorruptCheckpoint) {
        File checkpoint = new File(checkpointDir, "checkpoint");
        // Corrupt the metadata with a random long; try-with-resources
        // guarantees the file is closed even if a write or sync throws.
        try (RandomAccessFile writer = new RandomAccessFile(Serialization.getMetaDataFile(checkpoint), "rw")) {
            writer.seek(10);
            writer.writeLong(new Random().nextLong());
            writer.getFD().sync();
        }
    }
    channel = createFileChannel(overrides);
    channel.start();
    Assert.assertTrue(channel.isOpen());
    Set<String> out = consumeChannel(channel);
    // Fast replay should occur only when it is enabled AND the checkpoint
    // was unusable (corrupted).
    if (useFastReplay && shouldCorruptCheckpoint) {
        Assert.assertTrue(channel.didFastReplay());
    } else {
        Assert.assertFalse(channel.didFastReplay());
    }
    compareInputAndOut(in, out);
}