List of usage examples for java.nio ByteBuffer clear
public final Buffer clear()
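Buffer.clear() resets the position to 0 and the limit to the capacity; it does not erase the buffer's contents. That is why the examples below call it right before refilling a buffer or before handing it to a channel read. A minimal standalone sketch (not taken from any of the projects below) illustrating that behavior:

import java.nio.ByteBuffer;

public class ClearDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(8);
        buf.put((byte) 1).put((byte) 2);                          // position = 2, limit = 8
        System.out.println(buf.position() + "/" + buf.limit());   // prints 2/8

        buf.clear();                                               // position = 0, limit = 8; data NOT erased
        System.out.println(buf.position() + "/" + buf.limit());   // prints 0/8
        System.out.println(buf.get(0));                            // still prints 1
    }
}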
From source file:com.healthmarketscience.jackcess.PageChannel.java
/**
 * @param buffer Buffer to read the page into
 * @param pageNumber Number of the page to read in (starting at 0)
 */
public void readPage(ByteBuffer buffer, int pageNumber) throws IOException {
    validatePageNumber(pageNumber);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Reading in page " + Integer.toHexString(pageNumber));
    }
    buffer.clear();
    int bytesRead = _channel.read(buffer, (long) pageNumber * (long) getFormat().PAGE_SIZE);
    buffer.flip();
    if (bytesRead != getFormat().PAGE_SIZE) {
        throw new IOException("Failed attempting to read " + getFormat().PAGE_SIZE + " bytes from page "
                + pageNumber + ", only read " + bytesRead);
    }
    if (pageNumber == 0) {
        // de-mask header (note, page 0 never has additional encoding)
        applyHeaderMask(buffer);
    } else {
        _codecHandler.decodePage(buffer, pageNumber);
    }
}
From source file:com.sastix.cms.server.services.content.impl.ZipFileHandlerServiceImpl.java
@Override
public DataMaps unzip(byte[] bytes) throws IOException {
    Map<String, String> foldersMap = new HashMap<>();
    Map<String, byte[]> extractedBytesMap = new HashMap<>();
    InputStream byteInputStream = new ByteArrayInputStream(bytes);
    // validate that it is a zip file
    if (isZipFile(bytes)) {
        try {
            // get the zip file content
            ZipInputStream zis = new ZipInputStream(byteInputStream);
            // get the zipped file list entry
            ZipEntry ze = zis.getNextEntry();
            while (ze != null) {
                String fileName = ze.getName();
                if (!ze.isDirectory()) { // if entry is a directory, we should not add it as a file
                    ByteArrayOutputStream baos = new ByteArrayOutputStream();
                    try {
                        ByteBuffer bufIn = ByteBuffer.allocate(1024);
                        int bytesRead;
                        while ((bytesRead = zis.read(bufIn.array())) > 0) {
                            baos.write(bufIn.array(), 0, bytesRead);
                            bufIn.rewind();
                        }
                        bufIn.clear();
                        extractedBytesMap.put(fileName, baos.toByteArray());
                    } finally {
                        baos.close();
                    }
                } else {
                    foldersMap.put(fileName, fileName);
                }
                ze = zis.getNextEntry();
            }
            zis.closeEntry();
            zis.close();
        } catch (IOException ex) {
            ex.printStackTrace();
        }
    }
    DataMaps dataMaps = new DataMaps();
    dataMaps.setBytesMap(extractedBytesMap);
    dataMaps.setFoldersMap(foldersMap);
    return dataMaps;
}
From source file:com.hadoop.compression.lzo.LzoCompressor.java
/**
 * Reallocates a direct byte buffer by freeing the old one and allocating
 * a new one, unless the size is the same, in which case it is simply
 * cleared and returned.
 *
 * NOTE: this uses unsafe APIs to manually free memory - if anyone else
 * has a reference to the 'buf' parameter they will likely read random
 * data or cause a segfault by accessing it.
 */
private ByteBuffer realloc(ByteBuffer buf, int newSize) {
    if (buf != null) {
        if (buf.capacity() == newSize) {
            // Can use existing buffer
            buf.clear();
            return buf;
        }
        try {
            // Manually free the old buffer using undocumented unsafe APIs.
            // If this fails, we'll drop the reference and hope GC finds it
            // eventually.
            Method cleanerMethod = buf.getClass().getMethod("cleaner");
            cleanerMethod.setAccessible(true);
            Object cleaner = cleanerMethod.invoke(buf);
            Method cleanMethod = cleaner.getClass().getMethod("clean");
            cleanMethod.setAccessible(true);
            cleanMethod.invoke(cleaner);
        } catch (Exception e) {
            // Perhaps a non-sun-derived JVM - contributions welcome
            LOG.warn("Couldn't realloc bytebuffer", e);
        }
    }
    return ByteBuffer.allocateDirect(newSize);
}
From source file:com.healthmarketscience.jackcess.impl.PageChannel.java
/**
 * @param buffer Buffer to read the page into
 * @param pageNumber Number of the page to read in (starting at 0)
 */
public void readPage(ByteBuffer buffer, int pageNumber) throws IOException {
    validatePageNumber(pageNumber);
    ByteBuffer inPage = buffer;
    ByteBuffer outPage = buffer;
    if ((pageNumber != 0) && !_codecHandler.canDecodeInline()) {
        inPage = _tempDecodeBufferH.getPageBuffer(this);
        outPage.clear();
    }
    inPage.clear();
    int bytesRead = _channel.read(inPage, (long) pageNumber * (long) getFormat().PAGE_SIZE);
    inPage.flip();
    if (bytesRead != getFormat().PAGE_SIZE) {
        throw new IOException("Failed attempting to read " + getFormat().PAGE_SIZE + " bytes from page "
                + pageNumber + ", only read " + bytesRead);
    }
    if (pageNumber == 0) {
        // de-mask header (note, page 0 never has additional encoding)
        applyHeaderMask(buffer);
    } else {
        _codecHandler.decodePage(inPage, outPage, pageNumber);
    }
}
From source file:com.intel.chimera.stream.AbstractCryptoStreamTest.java
private void doByteBufferWrite(String cipherClass, ByteArrayOutputStream baos, boolean withChannel)
        throws Exception {
    baos.reset();
    CryptoOutputStream out = getCryptoOutputStream(baos, getCipher(cipherClass), defaultBufferSize, iv,
            withChannel);
    ByteBuffer buf = ByteBuffer.allocateDirect(dataLen / 2);
    buf.put(data, 0, dataLen / 2);
    buf.flip();
    int n1 = out.write(buf);

    buf.clear();
    buf.put(data, n1, dataLen / 3);
    buf.flip();
    int n2 = out.write(buf);

    buf.clear();
    buf.put(data, n1 + n2, dataLen - n1 - n2);
    buf.flip();
    int n3 = out.write(buf);

    Assert.assertEquals(dataLen, n1 + n2 + n3);
    out.flush();

    InputStream in = getCryptoInputStream(new ByteArrayInputStream(encData), getCipher(cipherClass),
            defaultBufferSize, iv, withChannel);
    buf = ByteBuffer.allocate(dataLen + 100);
    byteBufferReadCheck(in, buf, 0);
    in.close();
}
From source file:org.commonjava.indy.httprox.util.HttpConduitWrapper.java
public void writeExistingTransfer(Transfer txfr, boolean writeBody, String path, EventMetadata eventMetadata)
        throws IOException, IndyWorkflowException {
    Logger logger = LoggerFactory.getLogger(getClass());
    logger.debug("Valid transfer found, {}", txfr);
    try (InputStream in = txfr.openInputStream(true, eventMetadata)) {
        final HttpExchangeMetadata metadata = contentController.getHttpMetadata(txfr);
        logger.trace("Got HTTP metadata: {} for transfer: {}", metadata, txfr);

        writeStatus(ApplicationStatus.OK);

        Long headerContentLength = metadata != null ? metadata.getContentLength() : null;
        long bytes = metadata != null && headerContentLength != null ? metadata.getContentLength()
                : txfr.length();
        if (bytes < 1) {
            bytes = txfr.length();
        }
        if (bytes > 0) {
            writeHeader(ApplicationHeader.content_length, String.valueOf(bytes));
        }

        String lastMod = metadata != null ? metadata.getLastModified() : null;
        if (lastMod == null) {
            lastMod = HttpUtils.formatDateHeader(txfr.lastModified());
        }
        if (lastMod != null) {
            writeHeader(ApplicationHeader.last_modified, lastMod);
        }

        String contentType = metadata.getContentType();
        writeHeader(ApplicationHeader.content_type,
                contentType != null ? contentType : contentController.getContentType(path));

        logger.trace("Write body, {}", writeBody);
        if (writeBody) {
            sinkChannel.write(ByteBuffer.wrap("\r\n".getBytes()));

            int capacity = DEFAULT_READ_BUF_SIZE;
            ByteBuffer bbuf = ByteBuffer.allocate(capacity);
            byte[] buf = new byte[capacity];
            int read = -1;
            logger.trace("Read transfer...");
            while ((read = in.read(buf)) > -1) {
                logger.trace("Read transfer and write to channel, size: {}", read);
                bbuf.clear();
                bbuf.put(buf, 0, read);
                bbuf.flip();
                write(sinkChannel, bbuf);
            }
        }
    } finally {
        cacheProvider.cleanupCurrentThread();
    }

    sinkChannel.flush();
    logger.debug("Write transfer DONE.");
}
From source file:org.apache.camel.component.file.FileOperations.java
private void writeFileByStream(InputStream in, File target) throws IOException {
    FileChannel out = null;
    try {
        out = prepareOutputFileChannel(target, out);
        if (LOG.isTraceEnabled()) {
            LOG.trace("Using InputStream to transfer from: " + in + " to: " + out);
        }
        int size = endpoint.getBufferSize();
        byte[] buffer = new byte[size];
        ByteBuffer byteBuffer = ByteBuffer.wrap(buffer);
        int bytesRead;
        while ((bytesRead = in.read(buffer)) != -1) {
            if (bytesRead < size) {
                byteBuffer.limit(bytesRead);
            }
            out.write(byteBuffer);
            byteBuffer.clear();
        }
    } finally {
        IOHelper.close(in, target.getName(), LOG);
        IOHelper.close(out, target.getName(), LOG);
    }
}
From source file:com.clustercontrol.agent.job.PublicKeyThread.java
/**
 * Appends the given public key to the authorized_keys file.
 *
 * @param publicKey public key to append
 * @return true if the key was added successfully
 */
private synchronized boolean addKey(String publicKey) {
    m_log.debug("add key start");

    if (SKIP_KEYFILE_UPDATE) {
        m_log.info("skipped appending publicKey");
        return true;
    }

    // resolve the authorized_keys file path for the execution user
    String fileName = AgentProperties.getProperty(execUser.toLowerCase() + AUTHORIZED_KEY_PATH);
    m_log.debug("fileName" + fileName);
    if (fileName == null || fileName.length() == 0)
        return false;

    // open the target file
    File fi = new File(fileName);

    RandomAccessFile randomAccessFile = null;
    FileChannel channel = null;
    FileLock lock = null;
    boolean add = false;
    try {
        // open a RandomAccessFile in read/write mode
        randomAccessFile = new RandomAccessFile(fi, "rw");

        // obtain the FileChannel
        channel = randomAccessFile.getChannel();

        // try to acquire the file lock, retrying until timeout
        for (int i = 0; i < (FILELOCK_TIMEOUT / FILELOCK_WAIT); i++) {
            if (null != (lock = channel.tryLock())) {
                break;
            }
            m_log.info("waiting for locked file... [" + (i + 1) + "/" + (FILELOCK_TIMEOUT / FILELOCK_WAIT)
                    + " : " + fileName + "]");
            Thread.sleep(FILELOCK_WAIT);
        }
        if (null == lock) {
            m_log.warn("file locking timeout.");
            return false;
        }

        // append the key (guarded by the shared key lock)
        synchronized (authKeyLock) {
            // move to the end of the file
            channel.position(channel.size());

            // data to append
            String writeData = "\n" + publicKey;
            // m_log.debug("add key : " + writeData);

            // write the key through a ByteBuffer
            ByteBuffer buffer = ByteBuffer.allocate(512);
            buffer.clear();
            buffer.put(writeData.getBytes());
            buffer.flip();
            channel.write(buffer);
        }

        add = true;
    } catch (Exception e) {
        m_log.error(e);
    } finally {
        try {
            if (channel != null) {
                channel.close();
            }
            if (randomAccessFile != null) {
                randomAccessFile.close();
            }
            if (lock != null) {
                // release the file lock
                lock.release();
            }
        } catch (Exception e) {
        }
    }

    return add;
}
From source file:com.web.searchlocal.flashpaper.thread.Covnert2SwfTask.java
public void excute() {
    String tmpOutFile = outFile.getPath().concat(File.separator)
            .concat(inFile.getName().replaceAll("[.]{1}.*$", ".swf"));

    List<String> commandArray = new ArrayList<String>();
    commandArray.add(defaultCommand);
    commandArray.add(inFile.getPath());
    commandArray.add("-o");
    commandArray.add(tmpOutFile);

    ProcessBuilder pbObj = new ProcessBuilder();
    pbObj.command(commandArray);
    pbObj.directory(outFile);
    pbObj.redirectErrorStream(true);
    try {
        Process proObj = pbObj.start();
        final InputStream ins = proObj.getInputStream();
        final ByteBuffer byteBuffer = ByteBuffer.allocate(1024);
        // drain the process output on a daemon thread so the child does not block
        Thread th = new Thread() {
            public void run() {
                ReadableByteChannel rbcObj = Channels.newChannel(ins);
                try {
                    while (rbcObj.read(byteBuffer) != -1) {
                        byteBuffer.flip();
                        logger.info(java.nio.charset.Charset.defaultCharset().decode(byteBuffer));
                        byteBuffer.clear();
                    }
                } catch (IOException e) {
                    logger.error(e);
                }
            }
        };
        th.setDaemon(true);
        th.start();
        try {
            proObj.waitFor();
            logger.error("conversion finished: " + tmpOutFile);
        } catch (InterruptedException e) {
            logger.error(e);
        }
    } catch (IOException e) {
        logger.error(e);
    }
}
From source file:org.neo4j.io.pagecache.impl.SingleFilePageSwapperTest.java
private ByteBuffer wrap(byte[] bytes) {
    ByteBuffer buffer = ByteBuffer.allocateDirect(bytes.length);
    for (byte b : bytes) {
        buffer.put(b);
    }
    buffer.clear();
    return buffer;
}