List of usage examples for java.io.RandomAccessFile.seek
public void seek(long pos) throws IOException
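seek(pos) moves the file pointer to the given byte offset from the start of the file; the next read or write begins at that offset. Seeking past the end of the file is allowed, but the file length only changes once something is written there. Before the real-world examples below, here is a minimal sketch of typical usage (the file name demo.bin is a placeholder for illustration):

import java.io.IOException;
import java.io.RandomAccessFile;

public class SeekExample {
    public static void main(String[] args) throws IOException {
        // "r" opens the file read-only; use "rw" to allow writes as well.
        // "demo.bin" is a hypothetical file name used only for this sketch.
        try (RandomAccessFile raf = new RandomAccessFile("demo.bin", "r")) {
            raf.seek(128);          // move the file pointer to byte offset 128
            int b = raf.read();     // read the byte at that offset; returns -1 if 128 is at or past EOF
            System.out.println("Byte at offset 128: " + b);
        }
    }
}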
From source file:fr.bmartel.speedtest.SpeedTestTask.java
/**
 * Write upload POST request with file generated randomly.
 */
public void writeUpload(final String hostname, final int port, final String uri, final int fileSizeOctet) {
    mSpeedTestMode = SpeedTestMode.UPLOAD;
    this.mHostname = hostname;
    this.mPort = port;
    mUploadFileSize = new BigDecimal(fileSizeOctet);
    mForceCloseSocket = false;
    mErrorDispatched = false;
    mUploadTempFileSize = 0;
    mTimeStart = System.currentTimeMillis();
    connectAndExecuteTask(new Runnable() {
        @Override
        public void run() {
            if (mSocket != null && !mSocket.isClosed()) {
                RandomAccessFile uploadFile = null;
                final RandomGen randomGen = new RandomGen();
                try {
                    byte[] body = new byte[] {};
                    if (mSocketInterface.getUploadStorageType() == UploadStorageType.RAM_STORAGE) {
                        /* generate a file with size of fileSizeOctet octet */
                        body = randomGen.generateRandomArray(fileSizeOctet);
                    } else {
                        uploadFile = randomGen.generateRandomFile(fileSizeOctet);
                        uploadFile.seek(0);
                    }
                    final String head = "POST " + uri + " HTTP/1.1\r\n" + "Host: " + hostname
                            + "\r\nAccept: " + "*/*\r\nContent-Length: " + fileSizeOctet + "\r\n\r\n";
                    mUploadTempFileSize = 0;
                    final int uploadChunkSize = mSocketInterface.getUploadChunkSize();
                    final int step = fileSizeOctet / uploadChunkSize;
                    final int remain = fileSizeOctet % uploadChunkSize;
                    if (mSocket.getOutputStream() != null) {
                        if (writeFlushSocket(head.getBytes()) != 0) {
                            throw new SocketTimeoutException();
                        }
                        mTimeStart = System.currentTimeMillis();
                        mTimeEnd = 0;
                        if (mRepeatWrapper.isFirstUpload()) {
                            mRepeatWrapper.setFirstUploadRepeat(false);
                            mRepeatWrapper.setStartDate(mTimeStart);
                        }
                        if (mRepeatWrapper.isRepeatUpload()) {
                            mRepeatWrapper.updatePacketSize(mUploadFileSize);
                        }
                        for (int i = 0; i < step; i++) {
                            final byte[] chunk = SpeedTestUtils.readUploadData(mSocketInterface.getUploadStorageType(),
                                    body, uploadFile, mUploadTempFileSize, uploadChunkSize);
                            if (writeFlushSocket(chunk) != 0) {
                                throw new SocketTimeoutException();
                            }
                            mUploadTempFileSize += uploadChunkSize;
                            if (mRepeatWrapper.isRepeatUpload()) {
                                mRepeatWrapper.updateTempPacketSize(uploadChunkSize);
                            }
                            if (!mReportInterval) {
                                final SpeedTestReport report = mSocketInterface.getLiveUploadReport();
                                for (int j = 0; j < mListenerList.size(); j++) {
                                    mListenerList.get(j).onUploadProgress(report.getProgressPercent(), report);
                                }
                            }
                        }
                        final byte[] chunk = SpeedTestUtils.readUploadData(mSocketInterface.getUploadStorageType(),
                                body, uploadFile, mUploadTempFileSize, remain);
                        if (remain != 0 && writeFlushSocket(chunk) != 0) {
                            throw new SocketTimeoutException();
                        } else {
                            mUploadTempFileSize += remain;
                            if (mRepeatWrapper.isRepeatUpload()) {
                                mRepeatWrapper.updateTempPacketSize(remain);
                            }
                        }
                        if (!mReportInterval) {
                            final SpeedTestReport report = mSocketInterface.getLiveUploadReport();
                            for (int j = 0; j < mListenerList.size(); j++) {
                                mListenerList.get(j).onUploadProgress(SpeedTestConst.PERCENT_MAX.floatValue(), report);
                            }
                        }
                    }
                } catch (SocketTimeoutException e) {
                    mReportInterval = false;
                    mErrorDispatched = true;
                    closeSocket();
                    closeExecutors();
                    if (!mForceCloseSocket) {
                        SpeedTestUtils.dispatchSocketTimeout(mForceCloseSocket, mListenerList, false,
                                SpeedTestConst.SOCKET_WRITE_ERROR);
                    } else {
                        SpeedTestUtils.dispatchError(mForceCloseSocket, mListenerList, false, e.getMessage());
                    }
                } catch (IOException e) {
                    mReportInterval = false;
                    mErrorDispatched = true;
                    closeExecutors();
                    SpeedTestUtils.dispatchError(mForceCloseSocket, mListenerList, false, e.getMessage());
                } finally {
                    if (uploadFile != null) {
                        try {
                            uploadFile.close();
                            randomGen.deleteFile();
                        } catch (IOException e) {
                            //e.printStackTrace();
                        }
                    }
                }
            }
        }
    }, false);
}
From source file:it.drwolf.ridire.session.CrawlerManager.java
private long getURICount(Job job, String whichCount, User currentUser)
        throws IOException, HeritrixException, DocumentException, XPathExpressionException, SAXException {
    // this.updateJobsList(currentUser);
    Pattern pURICount = Pattern.compile(
            "\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}Z\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)", Pattern.MULTILINE);
    String jobName = job.getName();
    Job j = this.getPersistedJob(jobName);
    if (j == null) {
        return 0L;
    }
    if (job.getChildJobName() != null && job.getChildJobName().length() > 0) {
        jobName = job.getChildJobName();
    }
    String dir = this.entityManager.find(Parameter.class, Parameter.JOBS_DIR.getKey()).getValue();
    long uriCountFromCrawlReport = 0L;
    long queuedURICount = 0L;
    long discoveredURICount = 0L;
    HttpMethod method = null;
    String jobStatus = this.getJobStatus(jobName);
    // jobName = jobName.replaceAll(" ", "\\\\ ");
    try {
        while (true) {
            if (jobStatus.equals(CrawlStatus.RUNNING.toString())) {
                RandomAccessFile progressStatistics = null;
                try {
                    progressStatistics = new RandomAccessFile(this.jobsDir + CrawlerManager.FILE_SEPARATOR
                            + jobName + CrawlerManager.FILE_SEPARATOR + "logs" + CrawlerManager.FILE_SEPARATOR
                            + "progress-statistics.log", "r");
                    if (progressStatistics != null) {
                        progressStatistics.seek(Math.max(0, progressStatistics.length() - 3000));
                        String line = progressStatistics.readLine();
                        StringBuffer buffer = new StringBuffer();
                        while (line != null) {
                            buffer.append(line + "\n");
                            line = progressStatistics.readLine();
                        }
                        String progressStatisticsContent = buffer.toString();
                        Matcher m = pURICount.matcher(progressStatisticsContent);
                        int start = 0;
                        long queuedURICountTemp = 0L;
                        long discoveredURICountTemp = 0L;
                        long uriCountFromCrawlReportTemp = 0L;
                        while (m.find(start)) {
                            start = m.end();
                            queuedURICountTemp = Long.parseLong(m.group(2));
                            discoveredURICountTemp = Long.parseLong(m.group(1));
                            uriCountFromCrawlReportTemp = Long.parseLong(m.group(3));
                        }
                        queuedURICount += queuedURICountTemp;
                        discoveredURICount = discoveredURICountTemp;
                        uriCountFromCrawlReport = uriCountFromCrawlReportTemp;
                    }
                } catch (FileNotFoundException e) {
                    // TODO: handle exception
                } finally {
                    if (progressStatistics != null) {
                        progressStatistics.close();
                    }
                }
                break;
            } else if (whichCount.equalsIgnoreCase("finishedURICount")) {
                File reportFile = new File(dir + CrawlerManager.FILE_SEPARATOR + jobName
                        + CrawlerManager.FILE_SEPARATOR + "reports" + CrawlerManager.FILE_SEPARATOR
                        + "crawl-report.txt");
                if (reportFile.exists() && reportFile.canRead()) {
                    String content = FileUtils.readFileToString(reportFile);
                    Matcher m = CrawlerManager.pFinishedURICount.matcher(content);
                    if (m.find()) {
                        String bytes = m.group(1);
                        uriCountFromCrawlReport += Long.parseLong(bytes);
                    }
                }
                Matcher m = CrawlerManager.childJobPattern.matcher(jobName);
                if (m.matches()) {
                    Integer count = Integer.parseInt(m.group(1));
                    if (count > 1) {
                        count--;
                        jobName = jobName.substring(0, jobName.indexOf("__")) + "__" + count;
                    } else if (count == 1) {
                        jobName = jobName.substring(0, jobName.indexOf("__"));
                    } else {
                        break;
                    }
                } else {
                    break;
                }
            } else {
                return 0L;
            }
        }
    } finally {
        if (method != null) {
            method.releaseConnection();
        }
    }
    if (whichCount.equals("discoveredUriCount")) {
        return discoveredURICount;
    }
    if (whichCount.equals("queuedUriCount")) {
        return queuedURICount;
    }
    return uriCountFromCrawlReport;
}
From source file:org.commoncrawl.service.crawler.CrawlList.java
private static void appendTargetsToLogFile(File logFileName, IntrusiveList<CrawlTarget> list) throws IOException {
    LogFileHeader header = new LogFileHeader();
    boolean preExistingHeader = logFileName.exists();
    RandomAccessFile file = new RandomAccessFile(logFileName, "rw");
    try {
        long headerOffset = 0;
        if (preExistingHeader) {
            headerOffset = readLogFileHeader(file, header);
            if (header._writePos == 0) {
                file.seek(headerOffset);
            } else {
                // seek to appropriate write position
                file.seek(header._writePos);
            }
        } else {
            headerOffset = writeLogFileHeader(file, header);
        }
        CustomByteArrayOutputStream bufferOutputStream = new CustomByteArrayOutputStream(1 << 17);
        DataOutputStream dataOutputStream = new DataOutputStream(bufferOutputStream);
        CRC32 crc = new CRC32();
        for (CrawlTarget target : list) {
            PersistentCrawlTarget persistentTarget = target.createPersistentTarget();
            bufferOutputStream.reset();
            // write to intermediate stream ...
            persistentTarget.write(dataOutputStream);
            // and crc the data ...
            crc.reset();
            crc.update(bufferOutputStream.getBuffer(), 0, bufferOutputStream.size());
            // write out length first
            file.writeInt(bufferOutputStream.size());
            // crc next
            long computedValue = crc.getValue();
            // TODO: waste of space - write 32 bit values as long because having problems with java sign promotion rules during read...
            file.writeLong(computedValue);
            // and then the data
            file.write(bufferOutputStream.getBuffer(), 0, bufferOutputStream.size());
        }
        // now update header ...
        header._itemCount += list.size();
        header._writePos = file.getFilePointer();
        // now write out header anew ...
        writeLogFileHeader(file, header);
    } finally {
        if (file != null) {
            file.close();
        }
    }
}
From source file:com.turn.griffin.data.GriffinUploadTask.java
private void uploadFile(FileInfo fileInfo, BitSet availableBlockBitmap) {
    String filename = fileInfo.getFilename();
    long fileVersion = fileInfo.getVersion();
    long blockCount = fileInfo.getBlockCount();
    long blockSize = fileInfo.getBlockSize();
    byte[] buffer = new byte[(int) blockSize];
    GriffinLibCacheUtil libCacheManager = dataManager.getLibCacheManager().get();
    String dataTopicNameForProducer = GriffinKafkaTopicNameUtil.getDataTopicNameForProducer(filename, fileVersion);
    GriffinProducer producer = null;
    try {
        String libCacheUploadFilePath = libCacheManager.getUploadFilePath(fileInfo);
        RandomAccessFile libCacheUploadFile = new RandomAccessFile(libCacheUploadFilePath, "r");
        producer = new GriffinProducer(GriffinModule.BROKERS);
        logger.info(String.format("Starting to push %s",
                fileInfo.toString().replaceAll(System.getProperty("line.separator"), " ")));
        int uploadAttempts = 0;
        while (availableBlockBitmap.nextClearBit(0) != blockCount) {
            /* If a new version has arrived abort uploading older version */
            if (!libCacheManager.isLatestGlobalVersion(fileInfo)) {
                logger.info(String.format("Aborting upload for %s version %s as a newer version is now available.",
                        filename, fileVersion));
                break;
            }
            if (uploadAttempts >= maxUploadAttempts) {
                logger.warn(String.format("Unable to upload %s version %s after %s attempts",
                        filename, fileVersion, uploadAttempts));
                String subject = String.format("WARNING: GriffinUploadTask failed for blob:%s", filename);
                String body = String.format(
                        "Action: GriffinUploadTask failed for blob:%s version:%s%n"
                                + "Reason: Unable to upload after %s attempts%n",
                        filename, fileVersion, uploadAttempts);
                GriffinModule.emailAlert(subject, body);
                break;
            }
            int blockToUpload = availableBlockBitmap.nextClearBit(0);
            libCacheUploadFile.seek(blockToUpload * blockSize);
            int bytesRead = libCacheUploadFile.read(buffer);
            DataMessage msg = DataMessage.newBuilder().setBlockSeqNo(blockToUpload).setByteCount(bytesRead)
                    .setData(ByteString.copyFrom(buffer)).build();
            try {
                producer.send(dataTopicNameForProducer, DigestUtils.md5Hex(buffer), msg);
                availableBlockBitmap.set(blockToUpload);
                uploadAttempts = 0;
            } catch (FailedToSendMessageException ftsme) {
                /* Retry the same block again */
                logger.warn(String.format("Unable to send block %s for file: %s version: %s "
                        + "due to FailedToSendMessageException", blockToUpload, filename, fileVersion));
                uploadAttempts++;
            } catch (Exception e) {
                logger.warn(String.format("Unable to send block %s for file: %s version: %s",
                        blockToUpload, filename, fileVersion), e);
                logger.warn("Exception", e);
                uploadAttempts++;
            }
        }
        logger.info(String.format("Ending file upload for file %s version %s to %s",
                filename, fileVersion, dataTopicNameForProducer));
        libCacheUploadFile.close();
    } catch (IOException | RuntimeException e) {
        logger.error(String.format("Unable to upload file %s to %s", filename, dataTopicNameForProducer), e);
        String subject = String.format("WARNING: GriffinUploadTask failed for blob:%s", filename);
        String body = String.format(
                "Action: GriffinUploadTask failed for blob:%s version:%s%n"
                        + "Reason: Exception in GriffinUploadTask%n %s",
                filename, fileVersion, Throwables.getStackTraceAsString(e));
        GriffinModule.emailAlert(subject, body);
    } finally {
        if (producer != null) {
            producer.shutdown();
        }
    }
}
From source file:dk.statsbiblioteket.util.LineReaderTest.java
public void dumpSpeed2Helper(LineReader lr, RandomAccessFile ra, boolean warmup) throws Exception {
    int seeks = 10000;
    Profiler profiler = new Profiler();
    profiler.setExpectedTotal(seeks);
    profiler.setBpsSpan(1000);
    long size = lr.length();
    Random random = new Random();
    profiler.reset();
    for (int i = 0; i < seeks; i++) {
        long pos = Math.round(Math.floor(random.nextDouble() * (size - 6)));
        try {
            lr.seek(pos);
        } catch (EOFException e) {
            fail("Reached EOF at position " + pos);
        }
        lr.readInt();
        profiler.beat();
    }
    if (!warmup) {
        System.out.println("Seeked and read an int " + seeks + " times with LR "
                + "on a file of size " + size + " at " + Math.round(profiler.getBps(true)) + " seeks/second");
    }
    profiler.reset();
    for (int i = 0; i < seeks; i++) {
        long pos = Math.round(Math.floor(random.nextDouble() * (size - 6)));
        try {
            ra.seek(pos);
        } catch (EOFException e) {
            fail("Reached EOF at position " + pos);
        }
        ra.readInt();
        profiler.beat();
    }
    if (!warmup) {
        System.out.println("Seeked and read an int " + seeks + " times with RA "
                + "on a file of size " + size + " at " + Math.round(profiler.getBps(true)) + " seeks/second");
    }
}
From source file:org.commoncrawl.service.listcrawler.CrawlHistoryManager.java
/**
 * @return a sorted map of urlfp to item
 * @throws IOException
 */
TreeMap<URLFP, ProxyCrawlHistoryItem> loadLocalLogItemMap() throws IOException {
    TreeMap<URLFP, ProxyCrawlHistoryItem> itemMap = new TreeMap<URLFP, ProxyCrawlHistoryItem>();
    LOG.info("Reading Local Log File");
    RandomAccessFile file = new RandomAccessFile(getActiveLogFilePath(), "rw");
    // valid length indicator ...
    long validLength = 0;
    try {
        // skip header ...
        file.seek(LocalLogFileHeader.SIZE);
        validLength = file.getFilePointer();
        // ok walk n items ...
        for (int itemIdx = 0; itemIdx < _header._itemCount
                && file.getChannel().position() <= _header._fileSize; ++itemIdx) {
            try {
                ProxyCrawlHistoryItem item = readItem(file);
                // update valid length ...
                validLength = file.getFilePointer();
                // ok compute fingerprint for item ...
                URLFP fingerprintObject = URLUtils.getURLFPFromURL(item.getOriginalURL(), true);
                if (fingerprintObject == null) {
                    LOG.error("Could not compute fingerprint for URL:" + item.getOriginalURL());
                } else {
                    itemMap.put(fingerprintObject, item);
                }
            } catch (IOException e) {
                LOG.error(CCStringUtils.stringifyException(e));
                try {
                    if (!seekToNextSyncBytesPos(file)) {
                        LOG.error("Hit EOF While Seeking for next SyncByte Sequence!");
                        break;
                    } else {
                        LOG.info("Seek to Next SyncByte Succeeded! Continuing Load");
                    }
                } catch (IOException e2) {
                    LOG.error(CCStringUtils.stringifyException(e2));
                    LOG.error("Got IO Exception Reading SyncBytes - Bailing!");
                    break;
                }
            }
        }
    } finally {
        if (file.length() > validLength) {
            LOG.warn("File Length is:" + file.length() + " Truncating Length to:" + validLength);
            file.setLength(validLength);
        }
        file.close();
    }
    LOG.info("Done Reading Local Log File");
    return itemMap;
}
From source file:org.commoncrawl.service.crawler.CrawlList.java
private static int readTargetsFromLogFile(CrawlList domain, File logFileName, int desiredReadAmount,
        IntrusiveList<CrawlTarget> targetsOut) throws IOException {
    int itemsRead = 0;
    if (logFileName.exists()) {
        RandomAccessFile file = new RandomAccessFile(logFileName, "rw");
        LogFileHeader header = new LogFileHeader();
        try {
            long headerOffset = readLogFileHeader(file, header);
            // seek to appropriate read position
            if (header._readPos != 0)
                file.seek(header._readPos);
            int itemsToRead = Math.min(desiredReadAmount, header._itemCount);
            PersistentCrawlTarget persistentTarget = new PersistentCrawlTarget();
            CRC32 crc = new CRC32();
            CustomByteArrayOutputStream buffer = new CustomByteArrayOutputStream(1 << 16);
            for (int i = 0; i < itemsToRead; ++i) {
                // read length ...
                int urlDataLen = file.readInt();
                long urlDataCRC = file.readLong();
                buffer.reset();
                if (urlDataLen > buffer.getBuffer().length) {
                    buffer = new CustomByteArrayOutputStream(((urlDataLen / 65536) + 1) * 65536);
                }
                file.read(buffer.getBuffer(), 0, urlDataLen);
                crc.reset();
                crc.update(buffer.getBuffer(), 0, urlDataLen);
                long computedValue = crc.getValue();
                // validate crc values ...
                if (computedValue != urlDataCRC) {
                    throw new IOException("Crawl Target Log File Corrupt");
                } else {
                    // populate a persistentTarget from the (in memory) data stream
                    DataInputStream bufferReader = new DataInputStream(
                            new ByteArrayInputStream(buffer.getBuffer(), 0, urlDataLen));
                    persistentTarget.clear();
                    persistentTarget.readFields(bufferReader);
                    // populate a new crawl target structure ...
                    CrawlTarget newTarget = new CrawlTarget(domain, persistentTarget);
                    targetsOut.addTail(newTarget);
                }
            }
            itemsRead = itemsToRead;
            // now update header ...
            header._itemCount -= itemsRead;
            // now if item count is non zero ...
            if (header._itemCount != 0) {
                // set read cursor to next record location
                header._readPos = file.getFilePointer();
            }
            // otherwise ...
            else {
                // reset both cursors ...
                header._readPos = 0;
                header._writePos = 0;
            }
            // now write out header anew ...
            writeLogFileHeader(file, header);
        } finally {
            if (file != null) {
                file.close();
            }
        }
    }
    return itemsRead;
}
From source file:org.commoncrawl.service.listcrawler.CacheManager.java
/**
 * updateLogFileHeader - update the log file header
 * called via the log file writer thread ...
 * @throws IOException
 */
void updateLogFileHeader(File logFileName, long newlyAddedItemsCount, long newItemsFileSize) throws IOException {
    RandomAccessFile file = new RandomAccessFile(logFileName, "rw");
    try {
        synchronized (_header) {
            // update cached header ...
            _header._fileSize += newItemsFileSize;
            _header._itemCount += newlyAddedItemsCount;
            // set the position at zero ..
            file.seek(0);
            // and write header to disk ...
            _header.writeHeader(file);
        }
    } finally {
        // major bottle neck..
        // file.getFD().sync();
        file.close();
    }
}
From source file:com.mediatek.systemupdate.HttpManager.java
int writeFile(HttpResponse response, long currSize) {
    Xlog.i(TAG, "writeFile");
    if (mDownloadInfo.getDLSessionStatus() != DownloadInfo.STATE_QUERYNEWVERSION) {
        //mNotification.clearNotification(NotifyManager.NOTIFY_DOWNLOADING);
        mNotification.showDownloadingNotificaton(mDownloadInfo.getVerNum(),
                (int) (((double) Util.getFileSize(Util.getPackageFileName(mContext))
                        / (double) mDownloadInfo.getUpdateImageSize()) * 100), true);
    }
    Util.cancelAlarm(mContext, Util.Action.ACTION_AUTO_DL_TIME_OUT);
    mDownloadInfo.setOtaAutoDlStatus(false);
    mDownloadInfo.setIfPauseWithinTime(false);
    try {
        // response.getParams().setIntParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, 10000);
        InputStream in = response.getEntity().getContent();
        File ifolder = new File(Util.getPackagePathName(mContext));
        if (!ifolder.exists()) {
            ifolder.mkdirs();
        }
        RandomAccessFile out = null;
        String pkgFile = Util.getPackageFileName(mContext);
        if (pkgFile == null) {
            Xlog.e(TAG, "pkgFile is null");
            mErrorCode = HTTP_DETECTED_SDCARD_CRASH_OR_UNMOUNT;
            return mErrorCode;
        }
        try {
            out = new RandomAccessFile(pkgFile, "rws");
            out.seek(currSize);
        } catch (IOException e) {
            e.printStackTrace();
            onShutdownConn();
            mErrorCode = HTTP_DETECTED_SDCARD_CRASH_OR_UNMOUNT;
            return mErrorCode;
        }
        byte[] buff = new byte[4096];
        int rc = 0;
        int i = 0;
        int j = 0;
        boolean rightnow = false;
        boolean finish = false;
        File fPkg = new File(pkgFile);
        if (fPkg == null) {
            out.close();
            mErrorCode = HTTP_DETECTED_SDCARD_CRASH_OR_UNMOUNT;
            return mErrorCode;
        }
        while ((rc = in.read(buff, 0, 4096)) > 0) {
            // to-do: handle Intent.ACTION_MEDIA_EJECT
            /*
             * synchronized (this) { if (mEjectFlag) { try { out.close(); }
             * catch (IOException e) { e.printStackTrace(); }
             * onShutdownConn(); return mErrorCode =
             * HTTP_DETECTED_SDCARD_CRASH_OR_UNMOUNT; } }
             */
            try {
                if (fPkg.exists()) {
                    out.write(buff, 0, rc);
                } else {
                    Xlog.e(TAG, "file not exist during downloading ");
                    setPauseState();
                    out.close();
                    onShutdownConn();
                    mErrorCode = HTTP_FILE_NOT_EXIST;
                    sendErrorMessage();
                    return mErrorCode;
                }
            } catch (IOException e) {
                e.printStackTrace();
                out.close();
                onShutdownConn();
                mErrorCode = HTTP_DETECTED_SDCARD_CRASH_OR_UNMOUNT;
                return mErrorCode;
            }
            i++;
            int status = mDownloadInfo.getDLSessionStatus();
            if (status == DownloadInfo.STATE_PAUSEDOWNLOAD || status == DownloadInfo.STATE_QUERYNEWVERSION) {
                Xlog.i(TAG, "writeFile, DownloadInfo = " + status);
                mCookies = null;
                finish = false;
                out.close();
                onShutdownConn();
                return 0;
            }
            if (mHandler == null) {
                if (rightnow) {
                    i = 200;
                    rightnow = false;
                }
                if (i == 200) {
                    onDownloadProcessUpdate();
                    i = 0;
                }
            } else {
                if (!rightnow) {
                    i = 18;
                    rightnow = true;
                }
                if (i == 20) {
                    i = 0;
                    onDownloadProcessUpdate();
                }
            }
            j++;
            if (j == 20) {
                onTransferRatio();
                j = 0;
            }
            finish = true;
        }
        Xlog.i(TAG, "writeFile, finish, rc = " + rc + "bytes" + ". finish = " + finish);
        if (finish) {
            onTransferRatio();
            onDownloadProcessUpdate();
        }
        long curSize = Util.getFileSize(Util.getPackageFileName(mContext));
        Xlog.i(TAG, "curSize = " + curSize + " mNewVersionInfo.mSize = " + mDownloadInfo.getUpdateImageSize());
        out.close();
        if (curSize >= mDownloadInfo.getUpdateImageSize()) {
            onShutdownConn();
            return 0;
        }
    } catch (SocketTimeoutException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    showNoNetworkToast();
    if (mDownloadInfo.getDLSessionStatus() == DownloadInfo.STATE_DOWNLOADING) {
        setPauseState();
        Xlog.e(TAG, "writeFile, exception to set pause state");
        mDownloadInfo.setOtaAutoDlStatus(true);
        mDownloadInfo.setIfPauseWithinTime(true);
        Util.setAlarm(mContext, AlarmManager.RTC,
                Calendar.getInstance().getTimeInMillis() + AUTO_DL_TIME, Util.Action.ACTION_AUTO_DL_TIME_OUT);
    }
    onShutdownConn();
    mErrorCode = HTTP_RESPONSE_NETWORK_ERROR;
    sendErrorMessage();
    return mErrorCode;
}
From source file:net.ontopia.infoset.content.FileContentStore.java
private void allocateNewBlock() throws ContentStoreException {
    RandomAccessFile out = null;
    boolean exception_thrown = false;
    try {
        out = new RandomAccessFile(key_file, "rws");
        for (int i = 0; i < MAX_SPINS; i++) {
            // acquire exclusive lock
            FileLock l = out.getChannel().tryLock();
            if (l == null) {
                // wait a little before trying again
                try {
                    Thread.sleep(SPIN_TIMEOUT);
                } catch (InterruptedException e) {
                }
                continue;
            } else {
                try {
                    // allocate new key
                    int old_key;
                    int new_key;
                    String content = null;
                    if (out.length() == 0) {
                        old_key = 0;
                        new_key = old_key + KEY_BLOCK_SIZE;
                    } else {
                        try {
                            content = out.readUTF();
                            old_key = Integer.parseInt(content);
                            new_key = old_key + KEY_BLOCK_SIZE;
                        } catch (NumberFormatException e) {
                            if (content.length() > 100)
                                content = content.substring(0, 100) + "...";
                            throw new ContentStoreException(
                                    "Content store key file corrupted. Contained: '" + content + "'");
                        }
                    }
                    // truncate key file and write out new key
                    out.seek(0);
                    out.writeUTF(Integer.toString(new_key));
                    end_of_key_block = new_key;
                    last_key = old_key;
                    return;
                } finally {
                    // release file lock
                    try {
                        l.release();
                    } catch (Throwable t) {
                        throw new ContentStoreException("Could not release key file lock.", t);
                    }
                }
            }
        }
        throw new ContentStoreException("Block allocation timed out.");
    } catch (ContentStoreException e) {
        exception_thrown = true;
        throw e;
    } catch (Throwable t) {
        exception_thrown = true;
        throw new ContentStoreException(t);
    } finally {
        if (out != null) {
            try {
                out.close();
            } catch (IOException e) {
                if (!exception_thrown)
                    throw new ContentStoreException("Problems occurred when closing content store.", e);
            }
        }
    }
}