List of usage examples for java.io.RandomAccessFile.close()
public void close() throws IOException
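close() releases the file descriptor held by the RandomAccessFile. Since the class implements AutoCloseable, try-with-resources can invoke it automatically; the older pattern calls it in a finally block. A minimal sketch before the project examples (the file name data.bin is only an illustration):

    import java.io.IOException;
    import java.io.RandomAccessFile;

    public class CloseExample {
        public static void main(String[] args) throws IOException {
            // try-with-resources: close() runs automatically, even if an exception is thrown
            try (RandomAccessFile raf = new RandomAccessFile("data.bin", "r")) {
                byte[] header = new byte[16];
                raf.readFully(header); // throws EOFException if the file is shorter than 16 bytes
            }

            // pre-Java-7 equivalent: explicit close() in a finally block
            RandomAccessFile raf = new RandomAccessFile("data.bin", "r");
            try {
                raf.seek(0);
            } finally {
                raf.close();
            }
        }
    }

Most of the examples below use the second form; a few close the file inline on the success path only, which leaks the descriptor if an exception occurs first.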
From source file:big.BigZip.java
/**
 * Given a position inside our knowledge base, retrieve the data up to
 * the next file indicator.
 *
 * @param targetFile    The new file that will be created
 * @param startPosition The position from where we start to read the data
 * @param endPosition   The position where the data for this file ends
 * @return true on success, false otherwise
 */
public boolean extractBytes(final File targetFile, final long startPosition, final Long endPosition) {
    /*
     * This is a tricky method. We will be extracting data from the BIG
     * archive onto a new file somewhere on disk. The biggest challenge here
     * is to find exactly when the data for the file ends and still do the
     * file copy with wonderful performance.
     */
    try {
        // enable random access to the BIG file (fast as heck)
        RandomAccessFile dataBIG = new RandomAccessFile(fileMainBIG, "r");
        // if the target file exists, try to delete it
        if (targetFile.exists()) {
            targetFile.delete();
            if (targetFile.exists()) {
                // we failed completely
                System.out.println("BIG405 - Failed to delete: " + targetFile.getAbsolutePath());
                return false;
            }
        }
        // we need a temporary zip file holder
        File fileZip = new File("temp.zip");
        // delete the zip file if it already exists
        if (fileZip.exists()) {
            fileZip.delete();
            if (fileZip.exists()) {
                // we failed completely
                System.out.println("BIG416 - Failed to delete: " + fileZip.getAbsolutePath());
                return false;
            }
        }
        // create a new file
        RandomAccessFile dataNew = new RandomAccessFile(fileZip, "rw");
        // jump directly to the position where the file is located
        dataBIG.seek(startPosition);
        // now we start reading bytes during the mentioned interval
        while (dataBIG.getFilePointer() < endPosition) {
            // read a byte from our BIG archive
            int data = dataBIG.read();
            // write the same byte to the target file
            dataNew.write(data);
        }
        // close the file streams
        dataBIG.close();
        dataNew.close();
        // extract the file
        zip.extract(fileZip, new File("."));
        // delete the temp zip file
        fileZip.delete();
    } catch (FileNotFoundException ex) {
        Logger.getLogger(BigZip.class.getName()).log(Level.SEVERE, null, ex);
        return false;
    } catch (IOException ex) {
        Logger.getLogger(BigZip.class.getName()).log(Level.SEVERE, null, ex);
        return false;
    }
    return true;
}
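The copy loop above reads and writes one byte per call, so every byte crosses into the OS, which undercuts the "wonderful performance" the comment aims for. A buffered variant of the same bounded copy, shown as a sketch (not from the BigZip source; the variable names match the example above):

    // Hedged sketch: chunked copy between the same two RandomAccessFile objects.
    byte[] chunk = new byte[64 * 1024];
    dataBIG.seek(startPosition);
    long remaining = endPosition - startPosition;
    while (remaining > 0) {
        int toRead = (int) Math.min(chunk.length, remaining);
        int read = dataBIG.read(chunk, 0, toRead); // may read fewer bytes than requested
        if (read < 0) {
            break; // unexpected end of file
        }
        dataNew.write(chunk, 0, read);
        remaining -= read;
    }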
From source file:org.commoncrawl.service.crawler.CrawlSegmentLog.java
public static int reconcileLogFile(FileSystem fs, Path logFilePath, int listId, int segmentId,
        CrawlSegmentFPMap segment, File consolidationFile) throws IOException {

    RandomAccessFile consolidationStream = null;
    int consolidationFileItemCount = 0;

    if (consolidationFile != null) {
        consolidationStream = new RandomAccessFile(consolidationFile, "rw");
        consolidationFileItemCount = readerHeader(consolidationFile);
        consolidationStream.seek(consolidationStream.length());
    }

    int processedItemCount = 0;
    FSDataInputStream hdfsInputStream = null;

    try {
        // get the file size on disk
        long fileSize = fs.getFileStatus(logFilePath).getLen();
        // allocate an array that can hold up to the list size of items ...
        byte[] buffer = new byte[DEFAULT_LOGITEM_LIST_SIZE * LogItem.ItemSize_Bytes];
        // calculate item count
        int totalItemCount = (int) ((fileSize - getHeaderSize()) / LogItem.ItemSize_Bytes);
        // get a reader ...
        hdfsInputStream = fs.open(logFilePath);

        int headerItemCount = readHeader(hdfsInputStream);

        if (headerItemCount != totalItemCount) {
            LOG.warn("CrawlSegmentLog - header item count for log file:" + logFilePath.toString() + " is:"
                    + headerItemCount + " file size indicates:" + totalItemCount);
            totalItemCount = headerItemCount;
        }

        int remainingItemCount = totalItemCount;
        LogItemBuffer itemList = new LogItemBuffer(listId, segmentId);

        while (remainingItemCount != 0) {
            int blockItemCount = Math.min(remainingItemCount, DEFAULT_LOGITEM_LIST_SIZE);
            // and read the data
            hdfsInputStream.read(buffer, 0, blockItemCount * LogItem.ItemSize_Bytes);
            // and if the consolidation stream is valid ...
            if (consolidationStream != null) {
                // add entries to that stream ...
                consolidationStream.write(buffer, 0, blockItemCount * LogItem.ItemSize_Bytes);
            }
            // if not a dry run...
            if (segment != null) {
                // populate the item list
                itemList.loadFromStream(buffer, blockItemCount);
                // reconcile the list against the segment
                processedItemCount += reconcileItemList(itemList, segment);
            }
            // reduce item count
            remainingItemCount -= blockItemCount;
        }

        // finally, if the consolidation stream is valid ...
        if (consolidationStream != null) {
            // update the file's header ..
            writeHeader(consolidationFile, consolidationFileItemCount + totalItemCount);
        }
    } finally {
        if (consolidationStream != null) {
            consolidationStream.close();
        }
        if (hdfsInputStream != null) {
            hdfsInputStream.close();
        }
    }
    return processedItemCount;
}
From source file:au.org.ala.layers.intersect.Grid.java
/**
 * @param points     input array of longitude and latitude,
 *                   double[number_of_points][2], sorted latitude then longitude
 * @param bufferSize size of the read buffer; must be a multiple of 64
 * @return array of .gri file values corresponding to the points provided
 */
public float[] getValues3(double[][] points, int bufferSize) {
    // confirm inputs since they come from somewhere else
    if (points == null || points.length == 0) {
        return null;
    }

    if (subgrids != null) {
        return getValuesSubgrids(points, bufferSize);
    }

    // use preloaded grid data if available
    Grid g = Grid.getLoadedGrid(filename);
    if (g != null && g.grid_data != null) {
        return g.getValues2(points);
    }

    int length = points.length;
    int size, i;
    byte[] b;

    RandomAccessFile afile = null;
    File f2 = new File(filename + ".GRI");

    try {
        // read of random access file can throw an exception
        if (!f2.exists()) {
            afile = new RandomAccessFile(filename + ".gri", "r");
        } else {
            afile = new RandomAccessFile(filename + ".GRI", "r");
        }

        // do not cache subgrids (using getValues2)
        if (!subgrid && afile.length() < 80 * 1024 * 1024) {
            try {
                afile.close();
                afile = null;
            } catch (Exception e) {
            }
            return getValues2(points);
        }

        byte[] buffer = new byte[bufferSize]; // must be a multiple of 64
        Long bufferOffset = afile.length();

        float[] ret = new float[points.length];

        // get cell numbers
        long[][] cells = new long[points.length][2];
        for (int j = 0; j < points.length; j++) {
            if (Double.isNaN(points[j][0]) || Double.isNaN(points[j][1])) {
                cells[j][0] = -1;
                cells[j][1] = j;
            } else {
                cells[j][0] = getcellnumber(points[j][0], points[j][1]);
                cells[j][1] = j;
            }
        }
        java.util.Arrays.sort(cells, new Comparator<long[]>() {
            @Override
            public int compare(long[] o1, long[] o2) {
                if (o1[0] == o2[0]) {
                    return o1[1] > o2[1] ? 1 : -1;
                } else {
                    return o1[0] > o2[0] ? 1 : -1;
                }
            }
        });

        if (datatype.equalsIgnoreCase("BYTE")) {
            size = 1;
            for (i = 0; i < length; i++) {
                if (i > 0 && cells[i - 1][0] == cells[i][0]) {
                    ret[(int) cells[i][1]] = ret[(int) cells[i - 1][1]];
                    continue;
                }
                if (cells[i][0] >= 0) {
                    ret[(int) cells[i][1]] = getByte(afile, buffer, bufferOffset, cells[i][0] * size);
                } else {
                    ret[(int) cells[i][1]] = Float.NaN;
                }
            }
        } else if (datatype.equalsIgnoreCase("UBYTE")) {
            size = 1;
            for (i = 0; i < length; i++) {
                if (i > 0 && cells[i - 1][0] == cells[i][0]) {
                    ret[(int) cells[i][1]] = ret[(int) cells[i - 1][1]];
                    continue;
                }
                if (cells[i][0] >= 0) {
                    ret[(int) cells[i][1]] = getByte(afile, buffer, bufferOffset, cells[i][0] * size);
                    if (ret[(int) cells[i][1]] < 0) {
                        ret[(int) cells[i][1]] += 256;
                    }
                } else {
                    ret[(int) cells[i][1]] = Float.NaN;
                }
            }
        } else if (datatype.equalsIgnoreCase("SHORT")) {
            size = 2;
            b = new byte[size];
            for (i = 0; i < length; i++) {
                if (i > 0 && cells[i - 1][0] == cells[i][0]) {
                    ret[(int) cells[i][1]] = ret[(int) cells[i - 1][1]];
                    continue;
                }
                if (cells[i][0] >= 0) {
                    bufferOffset = getBytes(afile, buffer, bufferOffset, cells[i][0] * (long) size, b);
                    if (byteorderLSB) {
                        ret[(int) cells[i][1]] = (short) (((0xFF & b[1]) << 8) | (b[0] & 0xFF));
                    } else {
                        ret[(int) cells[i][1]] = (short) (((0xFF & b[0]) << 8) | (b[1] & 0xFF));
                    }
                } else {
                    ret[(int) cells[i][1]] = Float.NaN;
                }
            }
        } else if (datatype.equalsIgnoreCase("INT")) {
            size = 4;
            b = new byte[size];
            for (i = 0; i < length; i++) {
                if (i > 0 && cells[i - 1][0] == cells[i][0]) {
                    ret[(int) cells[i][1]] = ret[(int) cells[i - 1][1]];
                    continue;
                }
                if (cells[i][0] >= 0) {
                    bufferOffset = getBytes(afile, buffer, bufferOffset, cells[i][0] * (long) size, b);
                    if (byteorderLSB) {
                        ret[(int) cells[i][1]] = ((0xFF & b[3]) << 24) | ((0xFF & b[2]) << 16)
                                + ((0xFF & b[1]) << 8) + (b[0] & 0xFF);
                    } else {
                        ret[(int) cells[i][1]] = ((0xFF & b[0]) << 24) | ((0xFF & b[1]) << 16)
                                + ((0xFF & b[2]) << 8) + ((0xFF & b[3]) & 0xFF);
                    }
                } else {
                    ret[(int) cells[i][1]] = Float.NaN;
                }
            }
        } else if (datatype.equalsIgnoreCase("LONG")) {
            size = 8;
            b = new byte[size];
            for (i = 0; i < length; i++) {
                if (i > 0 && cells[i - 1][0] == cells[i][0]) {
                    ret[(int) cells[i][1]] = ret[(int) cells[i - 1][1]];
                    continue;
                }
                if (cells[i][0] >= 0) {
                    bufferOffset = getBytes(afile, buffer, bufferOffset, cells[i][0] * (long) size, b);
                    if (byteorderLSB) {
                        ret[(int) cells[i][1]] = ((long) (0xFF & b[7]) << 56) + ((long) (0xFF & b[6]) << 48)
                                + ((long) (0xFF & b[5]) << 40) + ((long) (0xFF & b[4]) << 32)
                                + ((long) (0xFF & b[3]) << 24) + ((long) (0xFF & b[2]) << 16)
                                + ((long) (0xFF & b[1]) << 8) + (0xFF & b[0]);
                    } else {
                        ret[(int) cells[i][1]] = ((long) (0xFF & b[0]) << 56) + ((long) (0xFF & b[1]) << 48)
                                + ((long) (0xFF & b[2]) << 40) + ((long) (0xFF & b[3]) << 32)
                                + ((long) (0xFF & b[4]) << 24) + ((long) (0xFF & b[5]) << 16)
                                + ((long) (0xFF & b[6]) << 8) + (0xFF & b[7]);
                    }
                } else {
                    ret[(int) cells[i][1]] = Float.NaN;
                }
            }
        } else if (datatype.equalsIgnoreCase("FLOAT")) {
            size = 4;
            b = new byte[size];
            for (i = 0; i < length; i++) {
                if (i > 0 && cells[i - 1][0] == cells[i][0]) {
                    ret[(int) cells[i][1]] = ret[(int) cells[i - 1][1]];
                    continue;
                }
                if (cells[i][0] >= 0) {
                    bufferOffset = getBytes(afile, buffer, bufferOffset, cells[i][0] * (long) size, b);
                    ByteBuffer bb = ByteBuffer.wrap(b);
                    if (byteorderLSB) {
                        bb.order(ByteOrder.LITTLE_ENDIAN);
                    }
                    ret[(int) cells[i][1]] = bb.getFloat();
                } else {
                    ret[(int) cells[i][1]] = Float.NaN;
                }
            }
        } else if (datatype.equalsIgnoreCase("DOUBLE")) {
            size = 8;
            b = new byte[8];
            for (i = 0; i < length; i++) {
                if (i > 0 && cells[i - 1][0] == cells[i][0]) {
                    ret[(int) cells[i][1]] = ret[(int) cells[i - 1][1]];
                    continue;
                }
                if (cells[i][0] >= 0) {
                    getBytes(afile, buffer, bufferOffset, cells[i][0] * (long) size, b);
                    ByteBuffer bb = ByteBuffer.wrap(b);
                    if (byteorderLSB) {
                        bb.order(ByteOrder.LITTLE_ENDIAN);
                    }
                    ret[(int) cells[i][1]] = (float) bb.getDouble();
                } else {
                    ret[(int) cells[i][1]] = Float.NaN;
                }
            }
        } else {
            logger.error("datatype not supported in Grid.getValues: " + datatype);
            // should not happen; catch anyway...
            for (i = 0; i < length; i++) {
                ret[i] = Float.NaN;
            }
        }

        // replace nodata values with NaN; apply the rescale factor otherwise
        for (i = 0; i < length; i++) {
            if ((float) ret[i] == (float) nodatavalue) {
                ret[i] = Float.NaN;
            } else {
                ret[i] *= rescale;
            }
        }
        return ret;
    } catch (Exception e) {
        logger.error("error getting grid file values", e);
    } finally {
        if (afile != null) {
            try {
                afile.close();
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            }
        }
    }
    return null;
}
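getByte and getBytes above are Grid helpers not shown in this excerpt; they page chunks of the .gri file through the shared buffer and return the updated buffer offset. A minimal, unbuffered stand-in for the RandomAccessFile pattern such a helper relies on, as a sketch (readAt is a hypothetical name, and the real helpers add caching):

    // Hypothetical stand-in: position the file pointer at an absolute byte
    // offset, then fill b completely from that position.
    static void readAt(RandomAccessFile file, long offset, byte[] b) throws IOException {
        file.seek(offset);   // absolute positioning; this is what makes the access "random"
        file.readFully(b);   // loops internally until b.length bytes are read, or throws EOFException
    }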
From source file:org.commoncrawl.service.listcrawler.CrawlList.java
private ProxyCrawlHistoryItem getHistoryItemFromOnDiskItem(OnDiskCrawlHistoryItem item) throws IOException {
    ProxyCrawlHistoryItem itemOut = new ProxyCrawlHistoryItem();

    if ((item._flags & OnDiskCrawlHistoryItem.FLAG_HAS_CRAWL_STATUS) != 0)
        itemOut.setCrawlStatus(item._crawlStatus);
    if ((item._flags & OnDiskCrawlHistoryItem.FLAG_HAS_ORIGINAL_RESULT_CODE) != 0)
        itemOut.setHttpResultCode(item._httpResultCode);
    if ((item._flags & OnDiskCrawlHistoryItem.FLAG_HAS_REDIRECT_STATUS) != 0)
        itemOut.setRedirectStatus(item._redirectStatus);
    if ((item._flags & OnDiskCrawlHistoryItem.FLAG_HAS_REDIRECT_RESULT_CODE) != 0)
        itemOut.setRedirectHttpResult(item._redirectHttpResult);
    if ((item._flags & OnDiskCrawlHistoryItem.FLAG_HAS_LASTMODIFIED_TIME) != 0)
        itemOut.setLastModifiedTime(item._updateTimestamp);

    // now attempt to get the string offset
    RandomAccessFile stringDataReader = new RandomAccessFile(_variableDataFile, "rw");

    try {
        // seek to string data
        stringDataReader.seek(item._stringsOffset);
        // and skip buffer length
        WritableUtils.readVInt(stringDataReader);
        // now populate original url ...
        itemOut.setOriginalURL(stringDataReader.readUTF());
        // now if redirect url is present
        if ((item._flags & OnDiskCrawlHistoryItem.FLAG_HAS_REDIRECT_URL) != 0) {
            itemOut.setRedirectURL(stringDataReader.readUTF());
        }
    } finally {
        stringDataReader.close();
    }
    return itemOut;
}
From source file:com.twinsoft.convertigo.beans.steps.WriteXMLStep.java
protected void writeFile(String filePath, NodeList nodeList) throws EngineException {
    if (nodeList == null) {
        throw new EngineException("Unable to write to xml file: element is Null");
    }

    String fullPathName = getAbsoluteFilePath(filePath);
    synchronized (Engine.theApp.filePropertyManager.getMutex(fullPathName)) {
        try {
            String encoding = getEncoding();
            encoding = encoding.length() > 0 && Charset.isSupported(encoding) ? encoding : "UTF-8";
            if (!isReallyAppend(fullPathName)) {
                String tTag = defaultRootTagname.length() > 0 ? StringUtils.normalize(defaultRootTagname)
                        : "document";
                FileUtils.write(new File(fullPathName),
                        "<?xml version=\"1.0\" encoding=\"" + encoding + "\"?>\n<" + tTag + "/>", encoding);
            }

            StringBuffer content = new StringBuffer();

            /* build the content; only append child elements */
            for (int i = 0; i < nodeList.getLength(); i++) {
                if (nodeList.item(i).getNodeType() == Node.ELEMENT_NODE) {
                    content.append(XMLUtils.prettyPrintElement((Element) nodeList.item(i), true, true));
                }
            }

            /* detect current xml encoding */
            RandomAccessFile randomAccessFile = null;
            try {
                randomAccessFile = new RandomAccessFile(fullPathName, "rw");
                FileChannel fc = randomAccessFile.getChannel();
                ByteBuffer buf = ByteBuffer.allocate(60);
                int nb = fc.read(buf);
                String sbuf = new String(buf.array(), 0, nb, "ASCII");
                String enc = sbuf.replaceFirst("^.*encoding=\"", "").replaceFirst("\"[\\d\\D]*$", "");
                if (!Charset.isSupported(enc)) {
                    enc = encoding;
                }
                buf.clear();

                /* retrieve the last closing tag */
                long pos = fc.size() - buf.capacity();
                if (pos < 0) {
                    pos = 0;
                }
                nb = fc.read(buf, pos);

                boolean isUTF8 = Charset.forName(enc) == Charset.forName("UTF-8");
                if (isUTF8) {
                    // skip any partial UTF-8 sequence at the start of the buffer:
                    // advance until the decoded text no longer begins with the
                    // replacement character (the "\uFFFD" literal is a reconstruction;
                    // the original character was lost when this example was extracted)
                    for (int i = 0; i < buf.capacity(); i++) {
                        sbuf = new String(buf.array(), i, nb - i, enc);
                        if (!sbuf.startsWith("\uFFFD")) {
                            pos += i;
                            break;
                        }
                    }
                } else {
                    sbuf = new String(buf.array(), 0, nb, enc);
                }

                int lastTagIndex = sbuf.lastIndexOf("</");
                if (lastTagIndex == -1) {
                    int iend = sbuf.lastIndexOf("/>");
                    if (iend != -1) {
                        lastTagIndex = sbuf.lastIndexOf("<", iend);
                        String tagname = sbuf.substring(lastTagIndex + 1, iend);
                        content = new StringBuffer(
                                "<" + tagname + ">\n" + content.toString() + "</" + tagname + ">");
                    } else {
                        throw new EngineException("Malformed XML file");
                    }
                } else {
                    content.append(sbuf.substring(lastTagIndex));
                    if (isUTF8) {
                        String before = sbuf.substring(0, lastTagIndex);
                        lastTagIndex = before.getBytes(enc).length;
                    }
                }
                fc.write(ByteBuffer.wrap(content.toString().getBytes(enc)), pos + lastTagIndex);
            } finally {
                if (randomAccessFile != null) {
                    randomAccessFile.close();
                }
            }
        } catch (IOException e) {
            throw new EngineException("Unable to write to xml file", e);
        } finally {
            Engine.theApp.filePropertyManager.releaseMutex(fullPathName);
        }
    }
}
From source file:io.minio.MinioClient.java
/**
 * Uploads given file as object in given bucket.
 * <p>
 * If the object is larger than 5MB, the client will automatically use a multipart session.
 * </p>
 * <p>
 * If the session fails, the user may attempt to re-upload the object by attempting to create
 * the exact same object again. The client will examine all parts of any current upload session
 * and attempt to reuse the session automatically. If a mismatch is discovered, the upload will fail
 * before uploading any more data. Otherwise, it will resume uploading where the session left off.
 * </p>
 * <p>
 * If the multipart session fails, the user is responsible for resuming or removing the session.
 * </p>
 *
 * @param bucketName Bucket name.
 * @param objectName Object name to create in the bucket.
 * @param fileName   File name to upload.
 *
 * @throws InvalidBucketNameException upon an invalid bucket name being given
 * @throws NoResponseException        upon no response from server
 * @throws IOException                upon connection error
 * @throws XmlPullParserException     upon parsing response xml
 * @throws ErrorResponseException     upon unsuccessful execution
 * @throws InternalException          upon internal library error
 */
public void putObject(String bucketName, String objectName, String fileName)
        throws InvalidBucketNameException, NoSuchAlgorithmException, InsufficientDataException, IOException,
        InvalidKeyException, NoResponseException, XmlPullParserException, ErrorResponseException,
        InternalException, InvalidArgumentException {
    if (fileName == null || "".equals(fileName)) {
        throw new InvalidArgumentException("empty file name is not allowed");
    }

    Path filePath = Paths.get(fileName);
    if (!Files.isRegularFile(filePath)) {
        throw new InvalidArgumentException("'" + fileName + "': not a regular file");
    }

    String contentType = Files.probeContentType(filePath);
    long size = Files.size(filePath);

    RandomAccessFile file = new RandomAccessFile(filePath.toFile(), "r");
    try {
        putObject(bucketName, objectName, contentType, size, file);
    } finally {
        file.close();
    }
}
From source file:big.BigZip.java
/**
 * Version 2, which extracts the text from a compressed file without
 * creating any file on the disk.
 *
 * @param startPosition Offset where the file begins
 * @param endPosition   Offset where the file ends
 * @return The source code of the compressed file
 */
public String extractBytesToRAM(final long startPosition, final Long endPosition) {
    String result = null;
    try {
        // enable random access to the BIG file (fast as heck)
        RandomAccessFile dataBIG = new RandomAccessFile(fileMainBIG, "r");
        // jump directly to the position where the file is located
        dataBIG.seek(startPosition);
        // create a byte array
        ByteArrayOutputStream byteOutput = new ByteArrayOutputStream();
        // now we start reading bytes during the mentioned interval
        while (dataBIG.getFilePointer() < endPosition) {
            // read a byte from our BIG archive
            int data = dataBIG.read();
            byteOutput.write(data);
        }
        // flush data at this point
        byteOutput.flush();
        // now convert the stream from output into an input (to feed the zip stream)
        ByteArrayInputStream byteInput = new ByteArrayInputStream(byteOutput.toByteArray());
        // where we place the decompressed bytes
        ByteArrayOutputStream textOutput = new ByteArrayOutputStream();
        // create the zip streamer
        final ArchiveInputStream archiveStream;
        archiveStream = new ArchiveStreamFactory().createArchiveInputStream("zip", byteInput);
        final ZipArchiveEntry entry = (ZipArchiveEntry) archiveStream.getNextEntry();
        // copy all bytes from one location to the other (and decompress the data)
        IOUtils.copy(archiveStream, textOutput);
        // flush the results
        textOutput.flush();
        // we've got the result right here!
        result = textOutput.toString();
        // now close all the streams that we have open
        dataBIG.close();
        byteOutput.close();
        byteInput.close();
        textOutput.close();
        archiveStream.close();
    } catch (FileNotFoundException ex) {
        Logger.getLogger(BigZip.class.getName()).log(Level.SEVERE, null, ex);
        return null;
    } catch (IOException ex) {
        Logger.getLogger(BigZip.class.getName()).log(Level.SEVERE, null, ex);
        return null;
    } catch (ArchiveException ex) {
        Logger.getLogger(BigZip.class.getName()).log(Level.SEVERE, null, ex);
    }
    return result;
}
From source file:com.siblinks.ws.service.impl.UserServiceImpl.java
/**
 * {@inheritDoc}
 */
@SuppressWarnings("resource")
@Override
@RequestMapping(value = "/getAvatar/{path}", method = RequestMethod.GET, produces = MediaType.IMAGE_JPEG_VALUE)
public ResponseEntity<byte[]> getAvatar(@PathVariable(value = "path") final String path) {
    logger.info("Call service get avatar");
    RandomAccessFile randomAccessFile = null;
    ResponseEntity<byte[]> responseEntity = null;
    try {
        // note: the original listing tested StringUtil.isNull(path) here, which
        // would attempt to read the file precisely when the path is missing;
        // the guard is inverted below so the avatar is read only when a path
        // was actually supplied
        if (!StringUtil.isNull(path)) {
            // read the avatar file
            randomAccessFile = new RandomAccessFile(path, "r");
            byte[] r = new byte[(int) randomAccessFile.length()];
            randomAccessFile.readFully(r);
            responseEntity = new ResponseEntity<byte[]>(r, new HttpHeaders(), HttpStatus.OK);
        } else {
            responseEntity = new ResponseEntity<byte[]>(HttpStatus.NO_CONTENT);
        }
    } catch (Exception e) {
        logger.debug("File not found");
        responseEntity = new ResponseEntity<byte[]>(HttpStatus.NOT_FOUND);
    } finally {
        try {
            if (randomAccessFile != null) {
                randomAccessFile.close();
            }
        } catch (IOException io) {
            // Do nothing
        }
    }
    return responseEntity;
}
From source file:au.org.ala.layers.intersect.Grid.java
/**
 * for DomainGenerator
 * <p/>
 * writes out a list of int values (one grid cell per entry in dfiltered)
 * to a .gri file, then writes the matching header
 * <p/>
 * byteorderLSB, data type INT4BYTES (the original comment said FLOAT,
 * which contradicts the body below)
 *
 * @param newfilename
 * @param dfiltered
 */
public void writeGrid(String newfilename, int[] dfiltered, double xmin, double ymin, double xmax, double ymax,
        double xres, double yres, int nrows, int ncols) {
    int size, i, length = dfiltered.length;

    double maxvalue = Integer.MAX_VALUE * -1;
    double minvalue = Integer.MAX_VALUE;

    // write data as whole file
    RandomAccessFile afile = null;
    try {
        // read of random access file can throw an exception
        afile = new RandomAccessFile(newfilename + ".gri", "rw");

        size = 4;
        byte[] b = new byte[size * length];
        ByteBuffer bb = ByteBuffer.wrap(b);

        if (byteorderLSB) {
            bb.order(ByteOrder.LITTLE_ENDIAN);
        } else {
            bb.order(ByteOrder.BIG_ENDIAN);
        }
        for (i = 0; i < length; i++) {
            bb.putInt(dfiltered[i]);
        }

        afile.write(b);
    } catch (Exception e) {
        logger.error("error writing grid file", e);
    } finally {
        if (afile != null) {
            try {
                afile.close();
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            }
        }
    }

    writeHeader(newfilename, xmin, ymin, xmin + xres * ncols, ymin + yres * nrows, xres, yres, nrows, ncols,
            minvalue, maxvalue, "INT4BYTES", "-9999");
}