List of usage examples for java.io.RandomAccessFile.seek
Method signature: public void seek(long pos) throws IOException
From source file:org.commoncrawl.service.crawler.CrawlLog.java
private static void transferLocalCheckpointLog(File crawlLogPath, HDFSCrawlURLWriter writer, long checkpointId) throws IOException { // and open the crawl log file ... RandomAccessFile inputStream = null; IOException exception = null; CRC32 crc = new CRC32(); CustomByteArrayOutputStream buffer = new CustomByteArrayOutputStream(1 << 17); byte[] syncBytesBuffer = new byte[SYNC_BYTES_SIZE]; // save position for potential debug output. long lastReadPosition = 0; try {//w w w . j a va 2 s. c om inputStream = new RandomAccessFile(crawlLogPath, "rw"); // and a data input stream ... RandomAccessFile reader = inputStream; // seek to zero reader.seek(0L); // read the header ... LogFileHeader header = readLogFileHeader(reader); // read a crawl url from the stream... while (inputStream.getFilePointer() < header._fileSize) { if (seekToNextSyncBytesPos(syncBytesBuffer, reader, header._fileSize)) { try { lastReadPosition = inputStream.getFilePointer(); // skip sync inputStream.skipBytes(SYNC_BYTES_SIZE); // read length ... int urlDataLen = reader.readInt(); long urlDataCRC = reader.readLong(); if (urlDataLen > buffer.getBuffer().length) { buffer = new CustomByteArrayOutputStream(((urlDataLen / 65536) + 1) * 65536); } reader.read(buffer.getBuffer(), 0, urlDataLen); crc.reset(); crc.update(buffer.getBuffer(), 0, urlDataLen); long computedValue = crc.getValue(); // validate crc values ... if (computedValue != urlDataCRC) { LOG.error("CRC Mismatch Detected during HDFS transfer in CrawlLog:" + crawlLogPath.getAbsolutePath() + " Checkpoint Id:" + checkpointId + " FilePosition:" + lastReadPosition); inputStream.seek(lastReadPosition + 1); } else { // allocate a crawl url data structure CrawlURL url = new CrawlURL(); DataInputStream bufferReader = new DataInputStream( new ByteArrayInputStream(buffer.getBuffer(), 0, urlDataLen)); // populate it from the (in memory) data stream url.readFields(bufferReader); try { // and write out appropriate sequence file entries ... 
writer.writeCrawlURLItem(new Text(url.getUrl()), url); } catch (IOException e) { LOG.error("Failed to write CrawlURL to SequenceFileWriter with Exception:" + CCStringUtils.stringifyException(e)); throw new URLWriterException(); } } } catch (URLWriterException e) { LOG.error("Caught URLRewriter Exception! - Throwing to outer layer!"); throw e; } catch (Exception e) { LOG.error("Ignoring Error Processing CrawlLog Entry at Position:" + lastReadPosition + " Exception:" + CCStringUtils.stringifyException(e)); } } else { break; } } } catch (EOFException e) { LOG.error("Caught EOF Exception during read of local CrawlLog:" + crawlLogPath.getAbsolutePath() + " Checkpoint Id:" + checkpointId + " FilePosition:" + lastReadPosition); } catch (IOException e) { LOG.error(CCStringUtils.stringifyException(e)); exception = e; throw e; } finally { if (inputStream != null) inputStream.close(); } }
From source file:org.kawanfw.file.servlet.util.FileTransferManager.java
public boolean download(OutputStream out, FileConfigurator fileConfigurator, String username, String filename, long chunkLength) throws FileNotFoundException, IOException { InputStream in = null;// ww w . j a v a 2s .c om debug(new Date() + " DOWNLOAD SESSION BEGIN "); try { filename = HttpConfigurationUtil.addRootPath(fileConfigurator, username, filename); debug(new Date() + " DOWNLOAD CHUNK"); // Do we must download a chunk only ? We will then seek the // Random access file and read only one chunk length and send it // back to client if (filename.endsWith(".kawanfw.chunk")) { // We are now in chunk case String rawFilename = StringUtils.substringBeforeLast(filename, ".kawanfw.chunk"); String indexStr = StringUtils.substringAfterLast(rawFilename, "."); int index = Integer.parseInt(indexStr); // Remove the number rawFilename = StringUtils.substringBeforeLast(rawFilename, "."); // We seek the total length of previous files, because client // method // is idempotent and may be replayed long lengthToSeek = (index - 1) * chunkLength; // debug("index : " + index); // debug("chunkLength : " + chunkLength); // debug("lengthToSeek: " + lengthToSeek); debug(""); debug(new Date() + " SESSION " + " " + index); RandomAccessFile raf = null; try { File file = new File(rawFilename); if (!file.exists()) { debug("File does not exists: " + file); return false; } debug(new Date() + " BEFORE SEEK "); debug(new Date() + " BEFORE CREATE RAF"); raf = new RandomAccessFile(file, "rw"); debug(new Date() + " AFTER CREATE RAF"); raf.seek(lengthToSeek); debug(new Date() + " BEFORE COPY "); long totalRead = copy(raf, out, chunkLength); debug(new Date() + " AFTER COPY " + totalRead); IOUtils.closeQuietly(raf); if (lengthToSeek + totalRead >= file.length()) { // End of operations // Nothing yo do with Random Access File } } finally { IOUtils.closeQuietly(raf); } return true; } else { debug(new Date() + " DOWNLOAD FULL FILE"); File file = new File(filename); if (!file.exists()) { debug("File does 
not exists: " + file); return false; } in = new BufferedInputStream(new FileInputStream(file)); IOUtils.copy(in, out); } return true; } finally { IOUtils.closeQuietly(in); } }
From source file:com.alu.e3.logger.LogCollector.java
private static String getTailOfFile(File file, int numLines) throws FileNotFoundException, IOException { if (numLines < 0) { return null; } else if (numLines == 0) { return ""; }//from w ww.ja va 2 s. c om java.io.RandomAccessFile raFile = new java.io.RandomAccessFile(file, "r"); long fileLength = file.length() - 1; StringBuilder sb = new StringBuilder(); int line = 0; for (long filePointer = fileLength; filePointer >= 0; filePointer--) { raFile.seek(filePointer); int readByte = raFile.readByte(); if (readByte == 0xA) { if (filePointer < fileLength) { line = line + 1; if (line >= numLines) { break; } } } sb.append((char) readByte); } String lastLines = sb.reverse().toString(); return lastLines; }
From source file:org.commoncrawl.service.crawler.CrawlLog.java
public static void walkCrawlLogFile(File crawlLogPath, long startOffset) throws IOException { // and open the crawl log file ... RandomAccessFile inputStream = null; IOException exception = null; CRC32 crc = new CRC32(); CustomByteArrayOutputStream buffer = new CustomByteArrayOutputStream(1 << 17); byte[] syncBytesBuffer = new byte[SYNC_BYTES_SIZE]; // save position for potential debug output. long lastReadPosition = 0; try {//from w ww. ja v a 2s . com inputStream = new RandomAccessFile(crawlLogPath, "rw"); // and a data input stream ... RandomAccessFile reader = inputStream; // seek to zero reader.seek(0L); // read the header ... LogFileHeader header = readLogFileHeader(reader); System.out.println("Header ItemCount:" + header._itemCount + " FileSize:" + header._fileSize); if (startOffset != 0L) { System.out.println("Preseeking to:" + startOffset); reader.seek(startOffset); } Configuration conf = new Configuration(); // read a crawl url from the stream... long recordCount = 0; while (inputStream.getFilePointer() < header._fileSize) { // System.out.println("PRE-SYNC SeekPos:"+ // inputStream.getFilePointer()); if (seekToNextSyncBytesPos(syncBytesBuffer, reader, header._fileSize)) { // System.out.println("POST-SYNC SeekPos:"+ // inputStream.getFilePointer()); lastReadPosition = inputStream.getFilePointer(); // skip sync inputStream.skipBytes(SYNC_BYTES_SIZE); // read length ... int urlDataLen = reader.readInt(); long urlDataCRC = reader.readLong(); if (urlDataLen > buffer.getBuffer().length) { buffer = new CustomByteArrayOutputStream(((urlDataLen / 65536) + 1) * 65536); } reader.read(buffer.getBuffer(), 0, urlDataLen); crc.reset(); crc.update(buffer.getBuffer(), 0, urlDataLen); long computedValue = crc.getValue(); // validate crc values ... 
if (computedValue != urlDataCRC) { LOG.error("CRC Mismatch Detected during HDFS transfer in CrawlLog:" + crawlLogPath.getAbsolutePath() + " FilePosition:" + lastReadPosition); inputStream.seek(lastReadPosition + 1); } else { if (recordCount++ % 10000 == 0) { // allocate a crawl url data structure CrawlURL url = new CrawlURL(); DataInputStream bufferReader = new DataInputStream( new ByteArrayInputStream(buffer.getBuffer(), 0, urlDataLen)); // populate it from the (in memory) data stream url.readFields(bufferReader); System.out.println("Record:" + recordCount + " At:" + lastReadPosition + " URL:" + url.getUrl() + " BuffSize:" + urlDataLen + " ContentLen:" + url.getContentRaw().getCount() + " LastModified:" + new Date(url.getLastAttemptTime()).toString()); } } } else { break; } } } catch (EOFException e) { LOG.error("Caught EOF Exception during read of local CrawlLog:" + crawlLogPath.getAbsolutePath() + " FilePosition:" + lastReadPosition); } catch (IOException e) { LOG.error(CCStringUtils.stringifyException(e)); exception = e; throw e; } finally { if (inputStream != null) inputStream.close(); } }
From source file:org.apache.james.mailrepository.file.MBoxMailRepository.java
/**
 * Quickly find a message by using the stored message offsets.
 *
 * Looks the key up in the in-memory offset map (mList), seeks the mbox file
 * to just before the stored offset, and lets parseMboxFile re-parse from
 * there, matching each candidate body against the key.
 *
 * @param key
 *            The key of the message to find
 * @return the located MimeMessage, or null if the key is unknown or the
 *         message could not be read
 */
private MimeMessage selectMessage(final String key) {
    MimeMessage foundMessage = null;
    // Can we find the key first
    if (mList == null || !mList.containsKey(key)) {
        // Not initiailised so no point looking
        if ((getLogger().isDebugEnabled())) {
            String logBuffer = this.getClass().getName() + " mList - key not found " + mboxFile;
            getLogger().debug(logBuffer);
        }
        return foundMessage;
    }
    long messageStart = mList.get(key);
    if ((getLogger().isDebugEnabled())) {
        String logBuffer = this.getClass().getName() + " Load message starting at offset " + messageStart
                + " from file " + mboxFile;
        getLogger().debug(logBuffer);
    }
    // Now try and find the position in the file
    RandomAccessFile ins = null;
    try {
        ins = new RandomAccessFile(mboxFile, "r");
        if (messageStart != 0) {
            // Seek one byte before the stored offset so the parser sees the
            // separator boundary. NOTE(review): looks like offsets are stored
            // pointing just past the separator newline - confirm against the
            // code that populates mList.
            ins.seek(messageStart - 1);
        }
        // Callback invoked by parseMboxFile for each message body it finds;
        // returns the parsed MIME message only when the body hashes to `key`.
        MessageAction op = new MessageAction() {
            public boolean isComplete() {
                return true;
            }

            public MimeMessage messageAction(String messageSeparator, String bodyText, long messageStart) {
                try {
                    if (key.equals(generateKeyValue(bodyText))) {
                        getLogger()
                                .debug(this.getClass().getName() + " Located message. Returning MIME message");
                        return convertTextToMimeMessage(bodyText);
                    }
                } catch (NoSuchAlgorithmException e) {
                    getLogger().error("MD5 not supported! ", e);
                }
                return null;
            }
        };
        foundMessage = this.parseMboxFile(ins, op);
    } catch (FileNotFoundException e) {
        getLogger().error("Unable to save(open) file (File not found) " + mboxFile, e);
    } catch (IOException e) {
        getLogger().error("Unable to write file (General I/O problem) " + mboxFile, e);
    } finally {
        if (foundMessage == null) {
            if ((getLogger().isDebugEnabled())) {
                String logBuffer = this.getClass().getName() + " select - message not found " + mboxFile;
                getLogger().debug(logBuffer);
            }
        }
        // always release the file handle, even on the not-found path
        if (ins != null)
            try {
                ins.close();
            } catch (IOException e) {
                getLogger().error("Unable to close file (General I/O problem) " + mboxFile, e);
            }
    }
    return foundMessage;
}
From source file:com.redhat.rhn.frontend.action.common.DownloadFile.java
/**
 * Dispatches a download request to the appropriate stream producer based on
 * the TYPE request attribute: kickstart files, cobbler-rendered kickstarts,
 * a proxied cobbler XML-RPC call, or (for the remaining types) package,
 * source-package, repo-sync-log and crash-file downloads resolved through
 * the requesting user's org.
 *
 * NOTE(review): postData/output are accumulated via String += in loops
 * (quadratic for large payloads) and the cobbler-API reader/writer are not
 * closed on exception paths - candidates for a follow-up fix.
 */
@Override
protected StreamInfo getStreamInfo(ActionMapping mapping, ActionForm form, HttpServletRequest request,
        HttpServletResponse response) throws Exception {
    String path = "";
    Map params = (Map) request.getAttribute(PARAMS);
    String type = (String) params.get(TYPE);
    if (type.equals(DownloadManager.DOWNLOAD_TYPE_KICKSTART)) {
        return getStreamInfoKickstart(mapping, form, request, response, path);
    } else if (type.equals(DownloadManager.DOWNLOAD_TYPE_COBBLER)) {
        // render the kickstart through cobbler, via the proxy host if needed
        String url = ConfigDefaults.get().getCobblerServerUrl() + (String) params.get(URL_STRING);
        KickstartHelper helper = new KickstartHelper(request);
        String data = "";
        if (helper.isProxyRequest()) {
            data = KickstartManager.getInstance().renderKickstart(helper.getKickstartHost(), url);
        } else {
            data = KickstartManager.getInstance().renderKickstart(url);
        }
        setTextContentInfo(response, data.length());
        return getStreamForText(data.getBytes());
    } else if (type.equals(DownloadManager.DOWNLOAD_TYPE_COBBLER_API)) {
        // read data from POST body
        String postData = new String();
        String line = null;
        BufferedReader reader = request.getReader();
        while ((line = reader.readLine()) != null) {
            postData += line;
        }
        // Send data
        URL url = new URL(ConfigDefaults.get().getCobblerServerUrl() + "/cobbler_api");
        URLConnection conn = url.openConnection();
        conn.setDoOutput(true);
        OutputStreamWriter wr = new OutputStreamWriter(conn.getOutputStream());
        // this will write POST /download//cobbler_api instead of
        // POST /cobbler_api, but cobbler do not mind
        wr.write(postData, 0, postData.length());
        wr.flush();
        conn.connect();
        // Get the response
        String output = new String();
        BufferedReader rd = new BufferedReader(new InputStreamReader(conn.getInputStream()));
        while ((line = rd.readLine()) != null) {
            output += line;
        }
        wr.close();
        KickstartHelper helper = new KickstartHelper(request);
        if (helper.isProxyRequest()) {
            // Search/replacing all instances of cobbler host with host
            // we pass in, for use with Spacewalk Proxy.
            output = output.replaceAll(ConfigDefaults.get().getCobblerHost(), helper.getForwardedHost());
        }
        setXmlContentInfo(response, output.length());
        return getStreamForXml(output.getBytes());
    } else {
        // remaining types are keyed by file id + requesting user
        Long fileId = (Long) params.get(FILEID);
        Long userid = (Long) params.get(USERID);
        User user = UserFactory.lookupById(userid);
        if (type.equals(DownloadManager.DOWNLOAD_TYPE_PACKAGE)) {
            Package pack = PackageFactory.lookupByIdAndOrg(fileId, user.getOrg());
            setBinaryContentInfo(response, pack.getPackageSize().intValue());
            path = Config.get().getString(ConfigDefaults.MOUNT_POINT) + "/" + pack.getPath();
            return getStreamForBinary(path);
        } else if (type.equals(DownloadManager.DOWNLOAD_TYPE_SOURCE)) {
            Package pack = PackageFactory.lookupByIdAndOrg(fileId, user.getOrg());
            List<PackageSource> src = PackageFactory.lookupPackageSources(pack);
            if (!src.isEmpty()) {
                setBinaryContentInfo(response, src.get(0).getPackageSize().intValue());
                path = Config.get().getString(ConfigDefaults.MOUNT_POINT) + "/" + src.get(0).getPath();
                return getStreamForBinary(path);
            }
        } else if (type.equals(DownloadManager.DOWNLOAD_TYPE_REPO_LOG)) {
            Channel c = ChannelFactory.lookupById(fileId);
            ChannelManager.verifyChannelAdmin(user, fileId);
            StringBuilder output = new StringBuilder();
            // concatenate the tails of the latest sync logs until we have
            // at least DOWNLOAD_REPO_LOG_MIN_LENGTH characters
            for (String fileName : ChannelManager.getLatestSyncLogFiles(c)) {
                RandomAccessFile file = new RandomAccessFile(fileName, "r");
                long fileLength = file.length();
                if (fileLength > DOWNLOAD_REPO_LOG_LENGTH) {
                    file.seek(fileLength - DOWNLOAD_REPO_LOG_LENGTH);
                    // throw away text till end of the actual line
                    file.readLine();
                } else {
                    file.seek(0);
                }
                String line;
                while ((line = file.readLine()) != null) {
                    output.append(line);
                    output.append("\n");
                }
                file.close();
                if (output.length() > DOWNLOAD_REPO_LOG_MIN_LENGTH) {
                    break;
                }
            }
            setTextContentInfo(response, output.length());
            return getStreamForText(output.toString().getBytes());
        } else if (type.equals(DownloadManager.DOWNLOAD_TYPE_CRASHFILE)) {
            CrashFile crashFile = CrashManager.lookupCrashFileByUserAndId(user, fileId);
            String crashPath = crashFile.getCrash().getStoragePath();
            setBinaryContentInfo(response, (int) crashFile.getFilesize());
            path = Config.get().getString(ConfigDefaults.MOUNT_POINT) + "/" + crashPath + "/"
                    + crashFile.getFilename();
            return getStreamForBinary(path);
        }
    }
    throw new UnknownDownloadTypeException(
            "The specified download type " + type + " is not currently supported");
}
From source file:com.stimulus.archiva.domain.Volume.java
/**
 * Parses volume metadata from an open volume.info file.
 *
 * Reads the file line by line ("name: value" pairs, '#' lines and blanks
 * skipped) and applies each recognized attribute to this Volume. After
 * parsing, NEW volumes are normalized to UNUSED, and an ACTIVE volume never
 * keeps a closed date. All parse failures are logged and swallowed - this
 * loader is deliberately best-effort.
 *
 * NOTE(review): "modified" and "latestarchived" both map to setClosedDate,
 * same as "closed" - looks suspicious but may be intentional aliasing;
 * confirm against the writer of volume.info before changing.
 */
protected void readVolumeInfoLines(RandomAccessFile randomAccessFile) {
    logger.debug("readVolumeInfoLines()");
    String line;
    StringTokenizer st;
    try {
        // always parse from the top of the file
        randomAccessFile.seek(0);
        while ((line = randomAccessFile.readLine()) != null) {
            // skip comments and blank lines
            if (line.startsWith("#") || line.length() < 1)
                continue;
            try {
                st = new StringTokenizer(line, ":");
            } catch (NoSuchElementException nse) {
                // NOTE(review): the tokenizer constructor does not throw
                // NoSuchElementException - the nextToken() calls below can;
                // this catch appears misplaced but is kept as-is
                logger.debug("possible volumeinfo corruption. no such element.");
                continue;
            }
            String name = st.nextToken();
            if (name.toLowerCase(Locale.ENGLISH).trim().equals("modified"))
                setClosedDate(DateUtil.convertStringToDate(st.nextToken().trim()));
            else if (name.toLowerCase(Locale.ENGLISH).trim().equals("latestarchived"))
                setClosedDate(DateUtil.convertStringToDate(st.nextToken().trim()));
            else if (name.toLowerCase(Locale.ENGLISH).trim().equals("closed"))
                setClosedDate(DateUtil.convertStringToDate(st.nextToken().trim()));
            else if (name.toLowerCase(Locale.ENGLISH).trim().equals("created"))
                setCreatedDate(DateUtil.convertStringToDate(st.nextToken().trim()));
            else if (name.toLowerCase(Locale.ENGLISH).trim().equals("earliestarchived"))
                setCreatedDate(DateUtil.convertStringToDate(st.nextToken().trim()));
            else if (name.toLowerCase(Locale.ENGLISH).trim().equals("version"))
                setVersion(st.nextToken().trim());
            else if (name.toLowerCase(Locale.ENGLISH).trim().equals("id"))
                setID(st.nextToken().trim());
            else if (name.toLowerCase(Locale.ENGLISH).trim().equals("status")) {
                Status status = Status.CLOSED; // default
                try {
                    status = Status.valueOf(st.nextToken().trim());
                } catch (IllegalArgumentException iae) {
                    logger.error(
                            "failed to load volume.info: status attribute is set to an illegal value {vol='"
                                    + toString() + "'}");
                    logger.error("volume will be set closed (due to error)");
                }
                setStatusNoAssertions(status);
            }
            ;
        }
        // we make sure that NEW entries become UNUSED
        if (getStatus() == Volume.Status.NEW)
            setStatus(Volume.Status.UNUSED);
        // make sure that volume closed date is not set, when volume is active
        if (getStatus() == Volume.Status.ACTIVE && getClosedDate() != null) {
            setClosedDate(null);
        }
    } catch (Exception e) {
        logger.debug("failed to read volumeinfo {" + toString() + "}", e);
    }
}
From source file:org.apache.jxtadoop.hdfs.server.common.Storage.java
protected void writeCorruptedData(RandomAccessFile file) throws IOException { final String messageForPreUpgradeVersion = "\nThis file is INTENTIONALLY CORRUPTED so that versions\n" + "of Hadoop prior to 0.13 (which are incompatible\n" + "with this directory layout) will fail to start.\n"; file.seek(0); file.writeInt(FSConstants.LAYOUT_VERSION); org.apache.jxtadoop.io.UTF8.writeString(file, ""); file.writeBytes(messageForPreUpgradeVersion); file.getFD().sync();//from w w w .ja v a2 s . co m }
From source file:com.android.volley.toolbox.DownloadNetwork.java
/**
 * Performs a resumable file download for a DownOrUpRequest.
 *
 * Opens (or creates) the local target file, seeks to its current length
 * and, when non-zero, sends a Range header so the server resumes the
 * transfer. On success the partial file is renamed to its final name; a
 * cancelled, incomplete download returns status 209 with the partial path.
 * Timeouts trigger a retry via attemptRetryOnException; other I/O errors
 * close and delete the partial file and surface as Volley errors.
 *
 * @param request must be a DownOrUpRequest, otherwise IllegalArgumentException
 * @return a NetworkResponse whose data is the resulting file path (bytes)
 * @throws VolleyError on network failure or exhausted retries
 */
@Override
public NetworkResponse performRequest(Request<?> request) throws VolleyError {
    long requestStart = SystemClock.elapsedRealtime();
    // loop so that retry attempts (timeouts) re-enter the request logic
    while (true) {
        HttpResponse httpResponse = null;
        byte[] responseContents = null;
        Map<String, String> responseHeaders = Collections.emptyMap();
        RandomAccessFile acessfile = null;
        File file = null;
        try {
            if (!(request instanceof DownOrUpRequest)) {
                throw new IllegalArgumentException("request object mast be DownOrUpRequest???");
            }
            DownOrUpRequest requestDown = (DownOrUpRequest) request;
            // Gather headers.
            Map<String, String> headers = new HashMap<String, String>();
            // Download have no cache
            file = getFile(requestDown);
            acessfile = new RandomAccessFile(file, "rws");
            long length = acessfile.length();
            // append to whatever was already downloaded
            acessfile.seek(length);
            if (length != 0) {
                // resume the transfer from the current local length
                headers.put("Range", "bytes=" + length + "-");
            }
            httpResponse = mHttpStack.performRequest(requestDown, headers);
            StatusLine statusLine = httpResponse.getStatusLine();
            int statusCode = statusLine.getStatusCode();
            responseHeaders = convertHeaders(httpResponse.getAllHeaders());
            // if the request is slow, log it.
            long requestLifetime = SystemClock.elapsedRealtime() - requestStart;
            logSlowRequests(requestLifetime, requestDown, responseContents, statusLine);
            if (statusCode < 200 || statusCode > 299) {
                acessfile.close();
                throw new IOException();
            }
            // Some responses such as 204s do not have content. We must check.
            if (httpResponse.getEntity() != null) {
                // streams the entity into the file via the open RandomAccessFile
                responseContents = entityToBytes(httpResponse.getEntity(), requestDown, acessfile);
            } else {
                // Add 0 byte response as a way of honestly representing a
                // no-content request.
                responseContents = new byte[0];
            }
            acessfile.close();
            String re = null;
            if (!requestDown.isCanceled() || requestDown.getmMaxLength() == requestDown.getmCurLength()) {
                // download finished: rename the partial file to its final name
                String path = file.getAbsolutePath();
                String re_name = ((DownOrUpRequest) request).getmDownloadName();
                if (re_name != null) {
                    re = path.substring(0, path.lastIndexOf('/')) + "/" + re_name;
                } else {
                    // no explicit name: strip the temporary extension
                    re = path.substring(0, path.lastIndexOf("."));
                }
                File rename = new File(re);
                boolean result = file.renameTo(rename);
                if (!result) {
                    Log.e(this.getClass().getName(), "?????:" + rename);
                    throw new IOException("????????");
                }
                requestDown.setDownloadFile(rename);
            } else {
                // cancelled mid-transfer: report the partial file with 209
                re = file.getAbsolutePath();
                statusCode = 209;
            }
            return new NetworkResponse(statusCode, re.getBytes(), responseHeaders, false,
                    SystemClock.elapsedRealtime() - requestStart);
        } catch (SocketTimeoutException e) {
            attemptRetryOnException("socket", request, new TimeoutError());
        } catch (ConnectTimeoutException e) {
            attemptRetryOnException("connection", request, new TimeoutError());
        } catch (MalformedURLException e) {
            throw new RuntimeException("Bad URL " + request.getUrl(), e);
        } catch (IOException e) {
            // clean up the partial file before surfacing the error
            if (acessfile != null) {
                try {
                    acessfile.close();
                    file.delete();
                } catch (IOException e1) {
                    e1.printStackTrace();
                }
            }
            int statusCode = 0;
            NetworkResponse networkResponse = null;
            if (httpResponse != null) {
                statusCode = httpResponse.getStatusLine().getStatusCode();
            } else {
                throw new NoConnectionError(e);
            }
            VolleyLog.e("Unexpected response code %d for %s", statusCode, request.getUrl());
            throw new NetworkError(networkResponse);
        }
    }
}
From source file:dk.statsbiblioteket.util.LineReaderTest.java
public void dumpSpeedRA() throws Exception { Random random = new Random(); RandomAccessFile ra = new RandomAccessFile(logfile, "r"); long[] pos = getPositions(); // Warming up for (int i = 0; i < 1000; i++) { ra.seek(pos[random.nextInt(LINES)]); ra.readLine();/*w w w .j a va2 s . c o m*/ } Profiler profiler = new Profiler(); profiler.setExpectedTotal(SPEED_SEEKS); for (int i = 0; i < SPEED_SEEKS; i++) { ra.seek(pos[random.nextInt(LINES)]); ra.readLine(); profiler.beat(); } System.out.println("Performed " + SPEED_SEEKS + " RA seeks & " + "reads at " + Math.round(profiler.getBps(false)) + " seeks/second"); }