Example usage for java.io RandomAccessFile seek

Introduction

On this page you can find example usage of java.io.RandomAccessFile.seek.

Prototype

public void seek(long pos) throws IOException 

Document

Sets the file-pointer offset, measured from the beginning of this file, at which the next read or write occurs.
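
A minimal, self-contained sketch of the call (the file name example.dat is a placeholder, not taken from the examples below): seek positions the file pointer, and the next read or write starts at that offset. Here it is used to read only the last 16 bytes of a file.

import java.io.IOException;
import java.io.RandomAccessFile;

public class SeekTailSketch {
    public static void main(String[] args) throws IOException {
        // "example.dat" is a hypothetical path used only for this sketch
        try (RandomAccessFile raf = new RandomAccessFile("example.dat", "r")) {
            long start = Math.max(0, raf.length() - 16);
            raf.seek(start);                                   // next read begins at this offset
            byte[] tail = new byte[(int) (raf.length() - start)];
            raf.readFully(tail);                               // read from the offset to end of file
            System.out.println(tail.length + " bytes read starting at offset " + start);
        }
    }
}

The offset may also be set beyond the end of the file; doing so does not change the file length, which grows only if data is written after such a seek.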

Usage

From source file:com.btoddb.fastpersitentqueue.MemorySegmentSerializer.java

public boolean searchOffline(MemorySegment seg, FpqEntry target) throws IOException {
    RandomAccessFile raFile = new RandomAccessFile(createPagingFile(seg), "r");
    try {
        // jump over header info - we already have it
        raFile.seek(seg.getEntryListOffsetOnDisk());
        for (int i = 0; i < seg.getNumberOfEntries(); i++) {
            FpqEntry entry = new FpqEntry();
            entry.readFromPaging(raFile);
            if (target.equals(entry)) {
                return true;
            }
        }
        return false;
    } finally {
        raFile.close();
    }
}

From source file:org.xdi.util.FileUtil.java

/**
 * Writes data to a file at the specified position, preserving the content
 * that previously followed that position by re-appending it after the new data.
 *
 * @param filePath path of the file to write to
 * @param position byte offset at which writing starts
 * @param data     data to write
 * @return true on success, false if an I/O error occurred
 */
public boolean writeToFile(String filePath, long position, String data) {

    try {
        File f = new File(filePath);
        RandomAccessFile raf;
        raf = new RandomAccessFile(f, "rw");
        raf.seek(position);
        StringBuilder dataAfterPosition = new StringBuilder(data);
        while (raf.getFilePointer() < raf.length()) {
            String line = raf.readLine();
            dataAfterPosition.append(line);
        }
        raf.seek(position);
        raf.writeUTF(dataAfterPosition.toString());
        raf.close();
        return true;
    } catch (FileNotFoundException e) {
        e.printStackTrace();
        return false;
    } catch (IOException e) {
        e.printStackTrace();
        return false;
    }
}

From source file:org.openoverlayrouter.noroot.logActivity.java

public void refresh() {

    StringBuffer contents = new StringBuffer();

    final StringBuffer fixedContents = contents;

    try {
        RandomAccessFile logFile = new RandomAccessFile(log_file, "r");
        if (logFile.length() > maxReadBytes) {
            logFile.seek(logFile.length() - maxReadBytes);
        }
        String currentLine = logFile.readLine();
        while (currentLine != null) {

            if (currentLine != null) {
                contents.append(currentLine);
                contents.append('\n');
            }
            currentLine = logFile.readLine();
        }
        try {
            if (logFile != null) {
                logFile.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {

    }

    mHandler.post(new Runnable() {
        public void run() {

            // Put the file contents into the TextView
            TextView log = (TextView) llLayout.findViewById(R.id.logView);
            log.setText(fixedContents);

            // Auto scroll to the bottom
            final ScrollView scroll = (ScrollView) llLayout.findViewById(R.id.scrollView1);
            scroll.post(new Runnable() {
                public void run() {
                    scroll.fullScroll(View.FOCUS_DOWN);
                }
            });
            if (myDialog != null) {
                myDialog.dismiss();
                myDialog = null;
            }
        }
    });
}

From source file:com.example.android.vault.EncryptedDocumentTest.java

public void testBitTwiddle() throws Exception {
    final EncryptedDocument doc = new EncryptedDocument(4, mFile, mDataKey, mMacKey);

    // write some metadata
    final JSONObject before = new JSONObject();
    before.put("twiddle", "twiddle");
    doc.writeMetadataAndContent(before, null);

    final RandomAccessFile f = new RandomAccessFile(mFile, "rw");
    f.seek(f.length() - 4);
    f.write(0x00); // corrupt a byte near the end of the file so the MAC check fails
    f.close();

    try {
        doc.readMetadata();
        fail("somehow passed hmac");
    } catch (DigestException expected) {
    }
}

From source file:com.puppycrawl.tools.checkstyle.checks.NewlineAtEndOfFileCheck.java

/**
 * Checks whether the content of the given file ends with the
 * platform-specific line separator.
 * @param randomAccessFile The file whose content is checked
 * @return boolean Whether the content ends with a line separator
 * @throws IOException When an IO error occurred while reading from the
 *         provided file
 */
private boolean endsWithNewline(RandomAccessFile randomAccessFile) throws IOException {
    final int len = lineSeparator.length();
    if (randomAccessFile.length() < len) {
        return false;
    }
    randomAccessFile.seek(randomAccessFile.length() - len);
    final byte[] lastBytes = new byte[len];
    final int readBytes = randomAccessFile.read(lastBytes);
    if (readBytes != len) {
        throw new IOException("Unable to read " + len + " bytes, got " + readBytes);
    }
    return lineSeparator.matches(lastBytes);
}

From source file:org.myframe.http.FileRequest.java

public byte[] handleResponse(HttpResponse response) throws IOException, KJHttpException {
    HttpEntity entity = response.getEntity();
    long fileSize = entity.getContentLength();
    if (fileSize <= 0) {
        MLoger.debug("Response doesn't present Content-Length!");
    }

    long downloadedSize = mTemporaryFile.length();
    boolean isSupportRange = HttpUtils.isSupportRange(response);
    if (isSupportRange) {
        fileSize += downloadedSize;

        String realRangeValue = HttpUtils.getHeader(response, "Content-Range");
        if (!TextUtils.isEmpty(realRangeValue)) {
            String assumeRangeValue = "bytes " + downloadedSize + "-" + (fileSize - 1);
            if (TextUtils.indexOf(realRangeValue, assumeRangeValue) == -1) {
                throw new IllegalStateException("The Content-Range Header is invalid Assume[" + assumeRangeValue
                        + "] vs Real[" + realRangeValue + "], " + "please remove the temporary file ["
                        + mTemporaryFile + "].");
            }
        }
    }

    if (fileSize > 0 && mStoreFile.length() == fileSize) {
        mStoreFile.renameTo(mTemporaryFile);
        mRequestQueue.getConfig().mDelivery.postDownloadProgress(this, fileSize, fileSize);
        return null;
    }

    RandomAccessFile tmpFileRaf = new RandomAccessFile(mTemporaryFile, "rw");
    if (isSupportRange) {
        tmpFileRaf.seek(downloadedSize);
    } else {
        tmpFileRaf.setLength(0);
        downloadedSize = 0;
    }

    try {
        InputStream in = entity.getContent();
        if (HttpUtils.isGzipContent(response) && !(in instanceof GZIPInputStream)) {
            in = new GZIPInputStream(in);
        }
        byte[] buffer = new byte[6 * 1024]; // 6K buffer
        int offset;

        while ((offset = in.read(buffer)) != -1) {
            tmpFileRaf.write(buffer, 0, offset);

            downloadedSize += offset;
            mRequestQueue.getConfig().mDelivery.postDownloadProgress(this, fileSize, downloadedSize);

            if (isCanceled()) {
                break;
            }
        }
    } finally {
        try {
            if (entity != null)
                entity.consumeContent();
        } catch (Exception e) {
            MLoger.debug("Error occured when calling consumingContent");
        }
        tmpFileRaf.close();
    }
    return null;
}

From source file:org.apache.flume.channel.file.TestEventQueueBackingStoreFactory.java

@Test(expected = InvalidProtocolBufferException.class)
public void testCorruptMeta() throws Throwable {
    EventQueueBackingStore backingStore = EventQueueBackingStoreFactory.get(checkpoint, 10, "test");
    backingStore.close();
    Assert.assertTrue(checkpoint.exists());
    File metaFile = Serialization.getMetaDataFile(checkpoint);
    Assert.assertTrue(metaFile.length() != 0);
    RandomAccessFile writer = new RandomAccessFile(metaFile, "rw");
    writer.seek(10);
    writer.writeLong(new Random().nextLong());
    writer.getFD().sync();
    writer.close();
    try {
        backingStore = EventQueueBackingStoreFactory.get(checkpoint, 10, "test");
    } catch (BadCheckpointException ex) {
        throw ex.getCause();
    }
}

From source file:com.scut.easyfe.network.kjFrame.http.FileRequest.java

public byte[] handleResponse(HttpResponse response) throws IOException, KJHttpException {
    HttpEntity entity = response.getEntity();
    long fileSize = entity.getContentLength();
    if (fileSize <= 0) {
        KJLoger.debug("Response doesn't present Content-Length!");
    }

    long downloadedSize = mTemporaryFile.length();
    boolean isSupportRange = HttpUtils.isSupportRange(response);
    if (isSupportRange) {
        fileSize += downloadedSize;

        String realRangeValue = HttpUtils.getHeader(response, "Content-Range");
        if (!TextUtils.isEmpty(realRangeValue)) {
            String assumeRangeValue = "bytes " + downloadedSize + "-" + (fileSize - 1);
            if (TextUtils.indexOf(realRangeValue, assumeRangeValue) == -1) {
                throw new IllegalStateException("The Content-Range Header is invalid Assume[" + assumeRangeValue
                        + "] vs Real[" + realRangeValue + "], " + "please remove the temporary file ["
                        + mTemporaryFile + "].");
            }
        }
    }

    if (fileSize > 0 && mStoreFile.length() == fileSize) {
        mStoreFile.renameTo(mTemporaryFile);
        mRequestQueue.getConfig().mDelivery.postDownloadProgress(this, fileSize, fileSize);
        return null;
    }

    RandomAccessFile tmpFileRaf = new RandomAccessFile(mTemporaryFile, "rw");
    if (isSupportRange) {
        tmpFileRaf.seek(downloadedSize);
    } else {
        tmpFileRaf.setLength(0);
        downloadedSize = 0;
    }

    try {
        InputStream in = entity.getContent();
        if (HttpUtils.isGzipContent(response) && !(in instanceof GZIPInputStream)) {
            in = new GZIPInputStream(in);
        }
        byte[] buffer = new byte[6 * 1024]; // 6K buffer
        int offset;

        while ((offset = in.read(buffer)) != -1) {
            tmpFileRaf.write(buffer, 0, offset);

            downloadedSize += offset;
            mRequestQueue.getConfig().mDelivery.postDownloadProgress(this, fileSize, downloadedSize);

            if (isCanceled()) {
                break;
            }
        }
    } finally {
        try {
            if (entity != null)
                entity.consumeContent();
        } catch (Exception e) {
            KJLoger.debug("Error occured when calling consumingContent");
        }
        tmpFileRaf.close();
    }
    return null;
}

From source file:org.commoncrawl.service.crawler.CrawlSegmentLog.java

public static int reconcileLogFile(FileSystem fs, Path logFilePath, int listId, int segmentId,
        CrawlSegmentFPMap segment, File consolidationFile) throws IOException {

    RandomAccessFile consolidationStream = null;

    int consolidationFileItemCount = 0;

    if (consolidationFile != null) {
        consolidationStream = new RandomAccessFile(consolidationFile, "rw");
        consolidationFileItemCount = readerHeader(consolidationFile);
        consolidationStream.seek(consolidationStream.length());
    }

    int processedItemCount = 0;

    FSDataInputStream hdfsInputStream = null;

    try {

        // get the file size on disk 
        long fileSize = fs.getFileStatus(logFilePath).getLen();

        // allocate an array that can hold up to the list size of items ...
        byte[] buffer = new byte[DEFAULT_LOGITEM_LIST_SIZE * LogItem.ItemSize_Bytes];

        // calculate item count
        int totalItemCount = (int) ((fileSize - getHeaderSize()) / LogItem.ItemSize_Bytes);

        // get a reader ... 

        hdfsInputStream = fs.open(logFilePath);

        int headerItemCount = readHeader(hdfsInputStream);

        if (headerItemCount != totalItemCount) {
            LOG.warn("CrawlSegmentLog - header item count for log file:" + logFilePath.toString() + " is:"
                    + headerItemCount + " file size indicates:" + totalItemCount);
            totalItemCount = headerItemCount;
        }

        int remainingItemCount = totalItemCount;

        LogItemBuffer itemList = new LogItemBuffer(listId, segmentId);

        while (remainingItemCount != 0) {

            int blockItemCount = Math.min(remainingItemCount, DEFAULT_LOGITEM_LIST_SIZE);

            // and read the data 
            hdfsInputStream.read(buffer, 0, (int) blockItemCount * LogItem.ItemSize_Bytes);
            // and if consolidation stream is valid ... 
            if (consolidationStream != null) {
                // add entries to that stream ... 
                consolidationStream.write(buffer, 0, (int) blockItemCount * LogItem.ItemSize_Bytes);
            }

            // if not a dry run... 
            if (segment != null) {
                // populate the item list   
                itemList.loadFromStream(buffer, blockItemCount);
                // reconcile the list against the segment 
                processedItemCount += reconcileItemList(itemList, segment);
            }
            // reduce item count 
            remainingItemCount -= blockItemCount;
        }

        // finally if consolidation stream is valid ... 
        if (consolidationStream != null) {
            // update the file's header .. 
            writeHeader(consolidationFile, consolidationFileItemCount + totalItemCount);
        }
    } finally {
        if (consolidationStream != null) {
            consolidationStream.close();
        }
        if (hdfsInputStream != null) {
            hdfsInputStream.close();
        }
    }
    return processedItemCount;
}

From source file:name.martingeisse.stackd.server.section.storage.FolderBasedSectionStorage.java

/**
 * Loads a single section from the given file and writes its compressed
 * data to the supplied output stream.
 *
 * @return true if the section was found, false if it is missing
 */
private boolean loadSectionFromFile(final OutputStream out, final RandomAccessFile access, final int tocIndex)
        throws IOException {

    // read the ToC entry
    access.seek(tocIndex * 12);
    final int dataStartAddress = access.readInt();
    final int dataSize = access.readInt();
    /* int dataFlags = */access.readInt();

    // handle missing sections
    if (dataStartAddress < 1) {
        return false;
    }

    // read the data
    final byte[] compressedCubeData = new byte[dataSize];
    access.seek(dataStartAddress);
    access.readFully(compressedCubeData);

    // write data to the stream
    out.write(compressedCubeData);

    return true;

}