Usage examples for java.io.RandomAccessFile.close()
public void close() throws IOException
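Before the project examples below, a minimal self-contained sketch of the two usual calling patterns (the file name example.dat is a placeholder): RandomAccessFile implements Closeable, so since Java 7 try-with-resources calls close() automatically; older code calls close() explicitly in a finally block, as most of the examples below do.

import java.io.IOException;
import java.io.RandomAccessFile;

public class CloseExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources: close() runs automatically, even if an exception is thrown
        // ("rw" creates example.dat if it does not exist yet)
        try (RandomAccessFile raf = new RandomAccessFile("example.dat", "rw")) {
            raf.writeInt(42);
            System.out.println("File length: " + raf.length());
        }

        // pre-Java 7 equivalent: explicit close() in a finally block
        RandomAccessFile raf = null;
        try {
            raf = new RandomAccessFile("example.dat", "r");
            System.out.println("First int: " + raf.readInt());
        } finally {
            if (raf != null) {
                raf.close();
            }
        }
    }
}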
From source file:com.qubole.rubix.core.RemoteReadRequestChain.java
public Integer call() throws IOException {
    Thread.currentThread().setName(threadName);
    checkState(isLocked, "Trying to execute Chain without locking");
    if (readRequests.size() == 0) {
        return 0;
    }
    RandomAccessFile localFile = null;
    FileChannel fc = null;
    try {
        localFile = new RandomAccessFile(localFilename, "rw");
        fc = localFile.getChannel();
        for (ReadRequest readRequest : readRequests) {
            log.debug(String.format("Executing ReadRequest: [%d, %d, %d, %d, %d]",
                    readRequest.getBackendReadStart(), readRequest.getBackendReadEnd(),
                    readRequest.getActualReadStart(), readRequest.getActualReadEnd(),
                    readRequest.getDestBufferOffset()));
            inputStream.seek(readRequest.backendReadStart);
            MappedByteBuffer mbuf = fc.map(FileChannel.MapMode.READ_WRITE, readRequest.backendReadStart,
                    readRequest.getBackendReadLength());
            log.debug(String.format("Mapped file from %d till length %d", readRequest.backendReadStart,
                    readRequest.getBackendReadLength()));
            /*
             * MappedByteBuffer does not provide a backing byte array, so we cannot write directly to it via
             * FSDataOutputStream.read. Instead, download to a normal destination buffer (+offset buffer to get
             * block boundaries) and then copy to the MappedByteBuffer.
             */
            int prefixBufferLength = (int) (readRequest.getActualReadStart() - readRequest.getBackendReadStart());
            int suffixBufferLength = (int) (readRequest.getBackendReadEnd() - readRequest.getActualReadEnd());
            log.debug(String.format("PrefixLength: %d SuffixLength: %d", prefixBufferLength, suffixBufferLength));
            // TODO: use single byte buffer for all three streams
            /*
             * TODO: also GC cost can be lowered by a shared buffer pool, a small one. IOUtils.copyLarge method.
             * A single 4kB byte buffer can be used to copy the whole file.
             */
            if (prefixBufferLength > 0) {
                byte[] prefixBuffer = new byte[prefixBufferLength];
                log.debug(String.format("Trying to Read %d bytes into prefix buffer", prefixBufferLength));
                totalPrefixRead += readAndCopy(prefixBuffer, 0, mbuf, prefixBufferLength);
                log.debug(String.format("Read %d bytes into prefix buffer", prefixBufferLength));
            }
            log.debug(String.format("Trying to Read %d bytes into destination buffer",
                    readRequest.getActualReadLength()));
            int readBytes = readAndCopy(readRequest.getDestBuffer(), readRequest.destBufferOffset, mbuf,
                    readRequest.getActualReadLength());
            totalRequestedRead += readBytes;
            log.debug(String.format("Read %d bytes into destination buffer", readBytes));
            if (suffixBufferLength > 0) {
                // If we hit EOF while reading the actually required data, there should not have been a suffix request
                checkState(readBytes == readRequest.getActualReadLength(),
                        "Acutal read less than required, still requested for suffix");
                byte[] suffixBuffer = new byte[suffixBufferLength];
                log.debug(String.format("Trying to Read %d bytes into suffix buffer", suffixBufferLength));
                totalSuffixRead += readAndCopy(suffixBuffer, 0, mbuf, suffixBufferLength);
                log.debug(String.format("Read %d bytes into suffix buffer", suffixBufferLength));
            }
        }
    } finally {
        if (fc != null) {
            fc.close();
        }
        if (localFile != null) {
            localFile.close();
        }
    }
    log.info(String.format("Read %d bytes from remote file, added %d to destination buffer",
            totalPrefixRead + totalRequestedRead + totalSuffixRead, totalRequestedRead));
    return totalRequestedRead;
}
From source file:com.kodemore.utility.Kmu.java
/**
 * Call e.close, but wrap the call in a try/catch block
 * that logs any exception without throwing it.
 */
public static void closeSafely(RandomAccessFile e) {
    try {
        if (e != null)
            e.close();
    } catch (Exception ex) {
        KmLog.error(ex, "Cannot close random access.");
    }
}
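A hypothetical call site for the helper above (the path data.bin is a placeholder): because closeSafely swallows and logs any exception, it can be called unconditionally from a finally block without masking the original failure.

RandomAccessFile raf = null;
try {
    raf = new RandomAccessFile("data.bin", "rw"); // placeholder path
    raf.writeInt(42);
} catch (IOException e) {
    // handle or log the write failure here
} finally {
    Kmu.closeSafely(raf); // never throws; failures are logged via KmLog
}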
From source file:org.apache.catalina.servlets.DefaultServlet.java
/**
 * Handle a partial PUT. New content specified in request is appended to
 * existing content in oldRevisionContent (if present). This code does
 * not support simultaneous partial updates to the same resource.
 *
 * @param req Description of the Parameter
 * @param range Description of the Parameter
 * @param path Description of the Parameter
 * @return Description of the Return Value
 * @throws IOException Description of the Exception
 */
protected File executePartialPut(HttpServletRequest req, Range range, String path) throws IOException {
    // Append data specified in ranges to existing content for this
    // resource - create a temp. file on the local filesystem to
    // perform this operation
    File tempDir = (File) getServletContext().getAttribute("javax.servlet.context.tempdir");
    // Convert all '/' characters to '.' in resourcePath
    String convertedResourcePath = path.replace('/', '.');
    File contentFile = new File(tempDir, convertedResourcePath);
    if (contentFile.createNewFile()) {
        // Clean up contentFile when Tomcat is terminated
        contentFile.deleteOnExit();
    }
    RandomAccessFile randAccessContentFile = new RandomAccessFile(contentFile, "rw");
    Resource oldResource = null;
    try {
        Object obj = getResources().lookup(path);
        if (obj instanceof Resource) {
            oldResource = (Resource) obj;
        }
    } catch (NamingException e) {
    }
    // Copy data in oldRevisionContent to contentFile
    if (oldResource != null) {
        BufferedInputStream bufOldRevStream = new BufferedInputStream(oldResource.streamContent(), BUFFER_SIZE);
        int numBytesRead;
        byte[] copyBuffer = new byte[BUFFER_SIZE];
        while ((numBytesRead = bufOldRevStream.read(copyBuffer)) != -1) {
            randAccessContentFile.write(copyBuffer, 0, numBytesRead);
        }
        bufOldRevStream.close();
    }
    randAccessContentFile.setLength(range.length);
    // Append data in request input stream to contentFile
    randAccessContentFile.seek(range.start);
    int numBytesRead;
    byte[] transferBuffer = new byte[BUFFER_SIZE];
    BufferedInputStream requestBufInStream = new BufferedInputStream(req.getInputStream(), BUFFER_SIZE);
    while ((numBytesRead = requestBufInStream.read(transferBuffer)) != -1) {
        randAccessContentFile.write(transferBuffer, 0, numBytesRead);
    }
    randAccessContentFile.close();
    requestBufInStream.close();
    return contentFile;
}
From source file:com.polyvi.xface.extension.advancedfiletransfer.FileDownloader.java
@Override
public void transfer(CallbackContext callbackCtx) {
    initDownloadInfo();
    if (mState == DOWNLOADING) {
        return;
    }
    mCallbackCtx = callbackCtx;
    if (null == mDownloadInfo) {
        onError(CONNECTION_ERR);
    } else {
        setState(DOWNLOADING);
        new Thread(new Runnable() {
            @Override
            public void run() {
                HttpURLConnection connection = null;
                RandomAccessFile randomAccessFile = null;
                InputStream is = null;
                int retry = RETRY;
                // Retry loop: keep downloading until finished, paused, or out of retries
                do {
                    int completeSize = mDownloadInfo.getCompleteSize();
                    try {
                        URL url = new URL(mUrl);
                        connection = (HttpURLConnection) url.openConnection();
                        connection.setConnectTimeout(TIME_OUT_MILLISECOND);
                        connection.setRequestMethod("GET");
                        // Request only the remaining bytes: Range: bytes=<completeSize>-
                        connection.setRequestProperty("Range", "bytes=" + completeSize + "-");
                        // Forward any stored cookies for this URL
                        setCookieProperty(connection, mUrl);
                        // Open the temporary .temp file and seek to the resume position
                        randomAccessFile = new RandomAccessFile(mLocalFilePath + TEMP_FILE_SUFFIX, "rwd");
                        randomAccessFile.seek(completeSize);
                        // Read the response body and append it to the file
                        is = connection.getInputStream();
                        byte[] buffer = new byte[mBufferSize];
                        int length = -1;
                        while ((length = is.read(buffer)) != -1) {
                            try {
                                randomAccessFile.write(buffer, 0, length);
                            } catch (Exception e) {
                                retry = -1;
                                break;
                            }
                            completeSize += length;
                            onProgressUpdated(completeSize, mDownloadInfo.getTotalSize());
                            mDownloadInfo.setCompleteSize(completeSize);
                            if (PAUSE == mState) {
                                break;
                            }
                        }
                        if (mDownloadInfo.isDownloadCompleted()) {
                            // Download complete: rename the .temp file to the final file name
                            renameFile(mLocalFilePath + TEMP_FILE_SUFFIX, mLocalFilePath);
                            onSuccess();
                            break;
                        }
                    } catch (FileNotFoundException e) {
                        onError(FILE_NOT_FOUND_ERR);
                        XLog.e(CLASS_NAME, e.getMessage());
                    } catch (IOException e) {
                        if (retry <= 0) {
                            onError(CONNECTION_ERR);
                            XLog.e(CLASS_NAME, e.getMessage());
                        }
                        // Wait RETRY_INTERVAL before the next attempt
                        try {
                            Thread.sleep(RETRY_INTERVAL);
                        } catch (InterruptedException ex) {
                            XLog.e(CLASS_NAME, "sleep be interrupted", ex);
                        }
                    } finally {
                        try {
                            if (null != is) {
                                is.close();
                            }
                            if (null != randomAccessFile) {
                                // randomAccessFile may still be null if new URL(...) failed
                                randomAccessFile.close();
                            }
                            if (null != connection) {
                                // connection may still be null if new URL(...) failed
                                connection.disconnect();
                            }
                        } catch (IOException e) {
                            XLog.e(CLASS_NAME, e.getMessage());
                        }
                    }
                } while ((DOWNLOADING == mState) && (0 < retry--));
            }
        }).start();
    }
}
From source file:org.commoncrawl.service.listcrawler.CrawlList.java
void loadSubDomainMetadataFromDisk() throws IOException {
    LOG.info("*** LIST:" + getListId() + " LOAD SUBDOMAIN METADATA FROM DISK ... ");
    if (_subDomainMetadataFile.exists()) {
        LOG.info("*** LIST:" + getListId() + " FILE EXISTS LOADING SUBDOMAIN DATA FROM DISK.");
        RandomAccessFile file = new RandomAccessFile(_subDomainMetadataFile, "rw");
        DataInputBuffer inputBuffer = new DataInputBuffer();
        byte fixedDataBlock[] = new byte[CrawlListMetadata.Constants.FixedDataSize];
        try {
            // skip version
            file.read();
            // read item count
            int itemCount = file.readInt();
            LOG.info("*** LIST:" + getListId() + " SUBDOMAIN ITEM COUNT:" + itemCount);
            CrawlListMetadata newMetadata = new CrawlListMetadata();
            TreeMap<Long, Integer> idToOffsetMap = new TreeMap<Long, Integer>();
            for (int i = 0; i < itemCount; ++i) {
                long orignalPos = file.getFilePointer();
                file.readFully(fixedDataBlock, 0, fixedDataBlock.length);
                inputBuffer.reset(fixedDataBlock, fixedDataBlock.length);
                try {
                    newMetadata.deserialize(inputBuffer, new BinaryProtocol());
                } catch (Exception e) {
                    LOG.error("-----Failed to Deserialize Metadata at Index:" + i + " Exception:"
                            + CCStringUtils.stringifyException(e));
                }
                idToOffsetMap.put(newMetadata.getDomainHash(), (int) orignalPos);
            }
            // write lookup table
            _offsetLookupTable = new DataOutputBuffer(idToOffsetMap.size() * OFFSET_TABLE_ENTRY_SIZE);
            for (Map.Entry<Long, Integer> entry : idToOffsetMap.entrySet()) {
                _offsetLookupTable.writeLong(entry.getKey());
                _offsetLookupTable.writeInt(entry.getValue());
            }
        } finally {
            file.close();
        }
        LOG.info("*** LIST:" + getListId() + " DONE LOADING SUBDOMAIN DATA FROM DISK");
    } else {
        LOG.info("*** LIST:" + getListId() + " SUBDOMAIN METADATA DOES NOT EXIST! LOADING FROM SCRATCH");
        RandomAccessFile fixedDataReader = new RandomAccessFile(_fixedDataFile, "rw");
        RandomAccessFile stringDataReader = new RandomAccessFile(_variableDataFile, "rw");
        try {
            // ok rebuild top level metadata as well
            _metadata.clear();
            OnDiskCrawlHistoryItem item = new OnDiskCrawlHistoryItem();
            int processedCount = 0;
            while (fixedDataReader.getFilePointer() != fixedDataReader.length()) {
                long position = fixedDataReader.getFilePointer();
                // store offset in item
                item._fileOffset = position;
                // load from disk
                item.deserialize(fixedDataReader);
                try {
                    // seek to string data
                    stringDataReader.seek(item._stringsOffset);
                    // and skip buffer length
                    WritableUtils.readVInt(stringDataReader);
                    // and read primary string
                    String url = stringDataReader.readUTF();
                    // get metadata object for subdomain
                    CrawlListMetadata subDomainMetadata = getTransientSubDomainMetadata(url);
                    // increment url count
                    subDomainMetadata.setUrlCount(subDomainMetadata.getUrlCount() + 1);
                    // increment top level metadata count
                    _metadata.setUrlCount(_metadata.getUrlCount() + 1);
                    // update top level metadata ..
                    updateMetadata(item, _metadata, 0);
                    // update sub-domain metadata object from item data
                    updateMetadata(item, subDomainMetadata, 0);
                    ++processedCount;
                } catch (IOException e) {
                    LOG.error("Exception Reading String Data For Item:" + (processedCount + 1));
                    LOG.error("Exception:" + CCStringUtils.stringifyException(e));
                    LOG.error("File Position:" + fixedDataReader.getFilePointer() + " StringsPointer:"
                            + stringDataReader.getFilePointer());
                }
                if (processedCount % 10000 == 0) {
                    LOG.info("*** LIST:" + getListId() + " Processed:" + processedCount + " Items");
                }
            }
            // ok commit top level metadata to disk as well
            writeMetadataToDisk();
        } catch (IOException e) {
            LOG.error("Encountered Exception Queueing Items for List:" + _listId + " Exception:"
                    + CCStringUtils.stringifyException(e));
            LOG.error("File Position:" + fixedDataReader.getFilePointer() + " StringsPointer:"
                    + stringDataReader.getFilePointer());
            _queueState = QueueState.QUEUED;
        } finally {
            fixedDataReader.close();
            stringDataReader.close();
        }
        LOG.info("*** LIST:" + getListId() + " SUBDOMAIN METADATA REBUILT FROM LIST DATA . WRITING TO DISK");
        // write metadata to disk
        writeInitialSubDomainMetadataToDisk();
        LOG.info("*** LIST:" + getListId() + " SUBDOMAIN METADATA REBUILT FROM LIST DATA . WRITE COMPLETE");
    }
}
From source file:com.joey.software.MoorFLSI.RepeatImageTextReader.java
public void loadTextData(File file) {
    try {
        RandomAccessFile in = new RandomAccessFile(file, "r");
        // Skip header
        in.readLine();
        in.readLine();
        in.readLine();
        // Skip Subject Information
        in.readLine();
        in.readLine();
        in.readLine();
        in.readLine();
        in.readLine();
        in.readLine();
        String startTimeInput = in.readLine();
        String commentsInput = in.readLine();
        String data = in.readLine();
        while (!data.startsWith("2) System Configuration")) {
            commentsInput += data;
            data = in.readLine();
        }
        // System configuration
        // in.readLine();
        in.readLine();
        String timeCounstantInput = in.readLine();
        String cameraGainInput = in.readLine();
        String exposureTimeInput = in.readLine();
        in.readLine();
        in.readLine();
        in.readLine();
        String resolutionInput = in.readLine();
        // Time Data
        in.readLine();
        String timeDataInput = in.readLine();
        String totalImagesInput = in.readLine();
        in.readLine();
        in.readLine();
        // in.readLine();
        // System.out.println(in.readLine());
        // in.readLine();
        // Parse important Size
        high = (new Scanner(resolutionInput.split(":")[1])).nextInt();
        wide = (new Scanner(resolutionInput.split(",")[1])).nextInt();
        int tot = 1;
        try {
            tot = (new Scanner(totalImagesInput.split(":")[1])).nextInt();
        } catch (Exception e) {
        }
        System.out.println(wide + "," + high);
        // Parse time information
        SimpleDateFormat format = new SimpleDateFormat("hh:mm:ss (dd/MM/yy)");
        Date startTime = null;
        try {
            startTime = format.parse(startTimeInput.split(": ")[1]);
        } catch (ParseException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        String[] frameTimeData = timeDataInput.split("information:")[1].split(",");
        Date[] timeInfo = new Date[tot];
        for (int i = 0; i < frameTimeData.length - 1; i++) {
            GregorianCalendar cal = new GregorianCalendar();
            cal.setTime(startTime);
            String dat = (frameTimeData[i]);
            String[] timeVals = dat.split(":");
            int hour = Integer.parseInt(StringOperations.removeNonNumber(timeVals[0]));
            int min = Integer.parseInt(StringOperations.removeNonNumber(timeVals[1]));
            int sec = Integer.parseInt(StringOperations.removeNonNumber(timeVals[2]));
            int msec = Integer.parseInt(StringOperations.removeNonNumber(timeVals[3]));
            cal.add(Calendar.HOUR_OF_DAY, hour);
            cal.add(Calendar.MINUTE, min);
            cal.add(Calendar.SECOND, sec);
            cal.add(Calendar.MILLISECOND, msec);
            timeInfo[i] = cal.getTime();
        }
        // Parse Image Data
        /*
         * Close the RandomAccessFile and switch to a Scanner: first store the current
         * position, then move the new stream to the correct point.
         */
        long pos = in.getFilePointer();
        in.close();
        FileInputStream fIn = new FileInputStream(file);
        fIn.skip(pos);
        BufferedInputStream bIn = new BufferedInputStream(fIn);
        Scanner sIn = new Scanner(bIn);
        short[][][] holder = new short[tot][wide][high];
        JFrame f = new JFrame();
        f.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
        StatusBarPanel stat = new StatusBarPanel();
        stat.setMaximum(high);
        f.getContentPane().setLayout(new BorderLayout());
        f.getContentPane().add(stat, BorderLayout.CENTER);
        f.setSize(200, 60);
        f.setVisible(true);
        for (int i = 0; i < tot; i++) {
            // Skip over the heading values
            stat.setStatusMessage("Loading " + i + " of " + tot);
            sIn.useDelimiter("\n");
            sIn.next();
            sIn.next();
            sIn.next();
            if (i != 0) {
                sIn.next();
            }
            sIn.reset();
            for (int y = 0; y < high; y++) {
                stat.setValue(y);
                sIn.nextInt();
                for (int x = 0; x < wide; x++) {
                    holder[i][x][y] = sIn.nextShort();
                }
            }
            addData(timeInfo[i], holder[i]);
        }
        // FrameFactroy.getFrame(new DynamicRangeImage(data[0]));
        // Start Image Data
    } catch (FileNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}
From source file:edu.umass.cs.gigapaxos.SQLPaxosLogger.java
private static void mergeLogfiles(File prev, File cur, PaxosPacketizer packetizer, MessageLogDiskMap msgLog,
        FileIDMap fidMap) throws IOException, JSONException {
    File tmpFile = new File(cur.toString() + TMP_FILE_SUFFIX);
    RandomAccessFile rafTmp = null, rafPrev = null, rafCur = null;
    long t = System.currentTimeMillis();
    try {
        rafTmp = new RandomAccessFile(tmpFile.toString(), "rw");
        rafPrev = new RandomAccessFile(prev.toString(), "r");
        rafCur = new RandomAccessFile(cur.toString(), "r");
        byte[] buf = new byte[1024];
        int numRead = 0;
        // copy prev file to tmp file
        while ((numRead = rafPrev.read(buf)) > 0)
            rafTmp.write(buf, 0, numRead);
        // copy cur file to tmp file
        while ((numRead = rafCur.read(buf)) > 0)
            rafTmp.write(buf, 0, numRead);
    } finally {
        if (rafTmp != null)
            rafTmp.close();
        if (rafPrev != null)
            rafPrev.close();
        if (rafCur != null)
            rafCur.close();
    }

    // copy tmp file index into memory
    HashMap<String, ArrayList<LogIndexEntry>> logIndexEntries = new HashMap<String, ArrayList<LogIndexEntry>>();
    try {
        rafTmp = new RandomAccessFile(tmpFile.toString(), "r");
        while (rafTmp.getFilePointer() < rafTmp.length()) {
            long offset = rafTmp.getFilePointer();
            int length = rafTmp.readInt();
            byte[] msg = new byte[length];
            rafTmp.readFully(msg);
            PaxosPacket pp = packetizer != null ? packetizer.stringToPaxosPacket(msg
                    // new String(msg, CHARSET)
            ) : PaxosPacket.getPaxosPacket(new String(msg, CHARSET));
            assert (pp != null) : " read logged message " + new String(msg, CHARSET);
            if (!logIndexEntries.containsKey(pp.getPaxosID()))
                logIndexEntries.put(pp.getPaxosID(), new ArrayList<LogIndexEntry>());
            logIndexEntries.get(pp.getPaxosID()).add(new LogIndexEntry(getSlot(pp), getBallot(pp).ballotNumber,
                    getBallot(pp).coordinatorID, pp.getType().getInt(), cur.toString(), offset, length));
        }
    } finally {
        if (rafTmp != null)
            rafTmp.close();
    }

    // atomically copy tmpFile to cur, adjust log index, delete prev
    synchronized (msgLog) {
        modifyLogfileAndLogIndex(cur, tmpFile, logIndexEntries, msgLog, fidMap);
        if (prev.delete())
            fidMap.remove(prev.toString());
    }
    DelayProfiler.updateDelay("merge", t);
    log.log(Level.INFO, "{0} merged logfile {1} into {2}", new Object[] { msgLog, prev, cur });
}
From source file:ar.com.qbe.siniestros.model.utils.MimeMagic.MagicMatcher.java
/**
 * test to see if this match or any submatches match
 *
 * @param f the file that should be used to test the match
 * @param onlyMimeMatch DOCUMENT ME!
 *
 * @return the deepest magic match object that matched
 *
 * @throws IOException DOCUMENT ME!
 * @throws UnsupportedTypeException DOCUMENT ME!
 */
public MagicMatch test(File f, boolean onlyMimeMatch) throws IOException, UnsupportedTypeException {
    log.debug("test(File)");
    int offset = match.getOffset();
    String description = match.getDescription();
    String type = match.getType();
    String mimeType = match.getMimeType();
    log.debug("test(File): testing '" + f.getName() + "' for '" + description + "'");
    log.debug("test(File): \n=== BEGIN MATCH INFO ==");
    log.debug(match.print());
    log.debug("test(File): \n=== END MATCH INFO ====\n");
    RandomAccessFile file = null;
    file = new RandomAccessFile(f, "r");
    try {
        int length = 0;
        if (type.equals("byte")) {
            length = 1;
        } else if (type.equals("short") || type.equals("leshort") || type.equals("beshort")) {
            length = 4;
        } else if (type.equals("long") || type.equals("lelong") || type.equals("belong")) {
            length = 8;
        } else if (type.equals("string")) {
            length = match.getTest().capacity();
        } else if (type.equals("regex")) {
            final int matchLength = match.getLength();
            length = (matchLength == 0) ? (int) file.length() - offset : matchLength;
            if (length < 0) {
                length = 0;
            }
        } else if (type.equals("detector")) {
            length = (int) file.length() - offset;
            if (length < 0) {
                length = 0;
            }
        } else {
            throw new UnsupportedTypeException("unsupported test type '" + type + "'");
        }
        // we know this match won't work since there isn't enough data for the test
        if (length > (file.length() - offset)) {
            return null;
        }
        byte[] buf = new byte[length];
        file.seek(offset);
        int bytesRead = 0;
        int size = 0;
        boolean gotAllBytes = false;
        boolean done = false;
        while (!done) {
            size = file.read(buf, 0, length - bytesRead);
            if (size == -1) {
                throw new IOException("reached end of file before all bytes were read");
            }
            bytesRead += size;
            if (bytesRead == length) {
                gotAllBytes = true;
                done = true;
            }
        }
        log.debug("test(File): stream size is '" + buf.length + "'");
        MagicMatch match = null;
        MagicMatch submatch = null;
        if (testInternal(buf)) {
            // set the top level match to this one
            try {
                match = getMatch() != null ? (MagicMatch) getMatch().clone() : null;
            } catch (CloneNotSupportedException e) {
                // noop
            }
            log.debug("test(File): testing matched '" + description + "'");
            // set the data on this match
            if ((onlyMimeMatch == false) && (subMatchers != null) && (subMatchers.size() > 0)) {
                log.debug("test(File): testing " + subMatchers.size() + " submatches for '" + description + "'");
                for (int i = 0; i < subMatchers.size(); i++) {
                    log.debug("test(File): testing submatch " + i);
                    MagicMatcher m = (MagicMatcher) subMatchers.get(i);
                    if ((submatch = m.test(f, false)) != null) {
                        log.debug("test(File): submatch " + i + " matched with '" + submatch.getDescription() + "'");
                        match.addSubMatch(submatch);
                    } else {
                        log.debug("test(File): submatch " + i + " doesn't match");
                    }
                }
            }
        }
        return match;
    } finally {
        try {
            file.close();
        } catch (Exception fce) {
        }
    }
}
From source file:au.org.ala.layers.dao.ObjectDAOImpl.java
@Override
public List<Objects> getObjectsById(String id, int start, int pageSize) {
    logger.info("Getting object info for fid = " + id);
    String limit_offset = " limit " + (pageSize < 0 ? "all" : pageSize) + " offset " + start;
    String sql = "select o.pid as pid, o.id as id, o.name as name, o.desc as description, "
            + "o.fid as fid, f.name as fieldname, o.bbox, o.area_km, "
            + "ST_AsText(ST_Centroid(o.the_geom)) as centroid,"
            + "GeometryType(o.the_geom) as featureType from objects o, fields f "
            + "where o.fid = ? and o.fid = f.id order by o.pid " + limit_offset;
    List<Objects> objects = jdbcTemplate.query(sql,
            ParameterizedBeanPropertyRowMapper.newInstance(Objects.class), id);
    updateObjectWms(objects);

    // get grid classes
    if (objects == null || objects.isEmpty()) {
        objects = new ArrayList<Objects>();
        IntersectionFile f = layerIntersectDao.getConfig().getIntersectionFile(id);
        if (f != null && f.getClasses() != null) {
            // shape position
            int pos = 0;
            for (Entry<Integer, GridClass> c : f.getClasses().entrySet()) {
                File file = new File(f.getFilePath() + File.separator + c.getKey() + ".wkt.index.dat");
                if (f.getType().equals("a") || !file.exists()) {
                    // class pid
                    if (pageSize == -1 || (pos >= start && pos - start < pageSize)) {
                        Objects o = new Objects();
                        o.setPid(f.getLayerPid() + ":" + c.getKey());
                        o.setId(f.getLayerPid() + ":" + c.getKey());
                        o.setName(c.getValue().getName());
                        o.setFid(f.getFieldId());
                        o.setFieldname(f.getFieldName());
                        o.setBbox(c.getValue().getBbox());
                        o.setArea_km(c.getValue().getArea_km());
                        o.setWmsurl(getGridClassWms(f.getLayerName(), c.getValue()));
                        objects.add(o);
                    }
                    pos++;
                    if (pageSize != -1 && pos >= start + pageSize) {
                        break;
                    }
                } else {
                    // polygon pid
                    RandomAccessFile raf = null;
                    try {
                        raf = new RandomAccessFile(file, "r");
                        long itemSize = (4 + 4 + 4 * 4 + 4);
                        long len = raf.length() / itemSize;
                        // group
                        if (pageSize != -1 && pos + len < start) {
                            pos += len;
                        } else {
                            // each record: number, character offset, minx, miny, maxx, maxy, area sq km
                            int i = 0;
                            if (pageSize != -1 && pos < start) {
                                // the first object requested is in this file, seek to the start
                                i = start - pos;
                                pos += i;
                                raf.seek(i * itemSize);
                            }
                            for (; i < len; i++) {
                                int n = raf.readInt();
                                /* int charoffset = */ raf.readInt();
                                float minx = raf.readFloat();
                                float miny = raf.readFloat();
                                float maxx = raf.readFloat();
                                float maxy = raf.readFloat();
                                float area = raf.readFloat();
                                if (pageSize == -1 || (pos >= start && pos - start < pageSize)) {
                                    Objects o = new Objects();
                                    o.setPid(f.getLayerPid() + ":" + c.getKey() + ":" + n);
                                    o.setId(f.getLayerPid() + ":" + c.getKey() + ":" + n);
                                    o.setName(c.getValue().getName());
                                    o.setFid(f.getFieldId());
                                    o.setFieldname(f.getFieldName());
                                    o.setBbox("POLYGON((" + minx + " " + miny + "," + minx + " " + maxy + ","
                                            + +maxx + " " + maxy + "," + +maxx + " " + miny + "," + +minx + " "
                                            + miny + "))");
                                    o.setArea_km(1.0 * area);
                                    o.setWmsurl(getGridPolygonWms(f.getLayerName(), n));
                                    objects.add(o);
                                }
                                pos++;
                                if (pageSize != -1 && pos >= start + pageSize) {
                                    break;
                                }
                            }
                        }
                    } catch (Exception e) {
                        logger.error(e.getMessage(), e);
                    } finally {
                        if (raf != null) {
                            try {
                                raf.close();
                            } catch (Exception e) {
                                logger.error(e.getMessage(), e);
                            }
                        }
                    }
                    if (pageSize != -1 && pos >= start + pageSize) {
                        break;
                    }
                }
            }
        }
    }
    return objects;
}