Usage examples for java.nio.ByteBuffer.rewind()
public final Buffer rewind()
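rewind() sets the buffer's position back to zero and discards the mark; the limit is left unchanged. It is typically called before re-reading data the buffer already contains, for example after draining the buffer into a channel or after filling it and noting the position. A minimal standalone sketch of that behaviour (the class name RewindDemo is just for illustration):

import java.nio.ByteBuffer;

public class RewindDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16);
        buf.putInt(42).putInt(7);           // position = 8, limit = 16

        buf.rewind();                        // position = 0, limit still 16, mark discarded
        System.out.println(buf.getInt());    // 42 -- the data can be read again from the start
        System.out.println(buf.getInt());    // 7
    }
}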
From source file:org.apache.hadoop.yarn.server.resourcemanager.RMAppManager.java
protected Credentials parseCredentials(ApplicationSubmissionContext application) throws IOException {
    Credentials credentials = new Credentials();
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    ByteBuffer tokens = application.getAMContainerSpec().getTokens();
    if (tokens != null) {
        dibb.reset(tokens);
        credentials.readTokenStorageStream(dibb);
        // rewind so the tokens buffer can be read again by later callers
        tokens.rewind();
    }
    return credentials;
}
From source file:com.talis.storage.s3.S3StoreTest.java
@Override
protected Store getStore() throws Exception {
    bucketname = UUID.randomUUID().toString();
    Provider<S3Service> serviceProvider = initialiseServiceProvider();
    final S3Object stubObject = new S3Object("foo");
    objectFactory = new S3ObjectFactory(bucketname) {
        @Override
        public ExternalizableS3Object newObject(String key, int index, MediaType mediaType, ByteBuffer buffer)
                throws IOException {
            ExternalizableS3Object obj = new ExternalizableS3Object();
            obj.setKey(key + "/" + index);
            obj.setContentType(S3Store.TMB_CHUNK_TYPE.toString());
            obj.addMetadata(S3Store.ACTUAL_CONTENT_TYPE_HEADER, mediaType.toString());
            // the current position tells how many bytes were written; rewind before reading them back
            byte[] bytes = new byte[buffer.position()];
            buffer.rewind();
            buffer.get(bytes);
            obj.setDataInputStream(new ByteArrayInputStream(bytes));
            return obj;
        }
    };
    chunkHandler = new StubChunkHandler();
    return new S3Store(objectFactory, chunkHandler);
}
From source file:com.edgenius.wiki.service.impl.SitemapServiceImpl.java
private void appendSitemapIndex(String sitemap) throws IOException {
    File sitemapIndexFile = new File(mapResourcesRoot.getFile(), SITEMAP_INDEX_NAME);
    if (!sitemapIndexFile.exists()) {
        // a new sitemap index file: write the XML skeleton first
        List<String> lines = new ArrayList<String>();
        lines.add("<?xml version=\"1.0\" encoding=\"utf-8\"?>");
        lines.add("<sitemapindex xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\">");
        lines.add("</sitemapindex>");
        FileUtils.writeLines(sitemapIndexFile, lines);
    }
    RandomAccessFile rfile = new RandomAccessFile(sitemapIndexFile, "rw");
    FileChannel channel = rfile.getChannel();

    // the new content is appended at the end of the file, just before the closing XML tag
    StringBuilder lines = new StringBuilder();
    lines.append(" <sitemap>\n");
    lines.append(" <loc>" + WebUtil.getHostAppURL() + SITEMAP_URL_CONTEXT + sitemap + "</loc>\n");
    lines.append(" <lastmod>" + TIME_FORMAT.format(new Date()) + " </lastmod>\n");
    lines.append(" </sitemap>\n");
    // the closing tag will be overwritten, so append it again after the new content
    lines.append(SITEMAP_INDEX_TAIL_FLAG);
    byte[] content = lines.toString().getBytes();

    ByteBuffer byteBuf = ByteBuffer.allocate(512);
    // seek for the closing tag, starting 512 bytes before the end of the file
    int len = 0, headIdx = 0;
    long tailIdx = channel.size() - 512;
    tailIdx = tailIdx < 0 ? 0 : tailIdx;
    long headPos = -1;
    StringBuilder header = new StringBuilder();
    while ((len = channel.read(byteBuf, tailIdx)) > 0) {
        // rewind before draining the bytes that were just read into the buffer
        byteBuf.rewind();
        byte[] dst = new byte[len];
        byteBuf.get(dst, 0, len);
        header.append(new String(dst, "UTF8"));
        headIdx = header.indexOf(SITEMAP_INDEX_TAIL_FLAG);
        if (headIdx != -1) {
            headPos = channel.size() - header.substring(headIdx).getBytes().length;
            break;
        }
    }
    FileLock lock = channel.tryLock(headPos, content.length, false);
    try {
        channel.write(ByteBuffer.wrap(content), headPos);
    } finally {
        lock.release();
    }
    channel.force(false);
    rfile.close();
}
From source file:org.apache.hadoop.hbase.coprocessor.TimeseriesAggregateImplementation.java
@Override
public void getAvg(RpcController controller, TimeseriesAggregateRequest request,
        RpcCallback<TimeseriesAggregateResponse> done) {
    TimeseriesAggregateResponse response = null;
    InternalScanner scanner = null;
    Map<Long, SimpleEntry<Long, S>> averages = new HashMap<Long, SimpleEntry<Long, S>>();
    boolean hasScannerRange = false;

    if (!request.hasRange()) {
        // when no time range is passed in via the request, the scanner itself is
        // assumed to be timestamp-range bound
        hasScannerRange = true;
    }
    try {
        ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
        S sumVal = null;
        T temp;
        Long kvCountVal = 0l;
        Scan scan = ProtobufUtil.toScan(request.getScan());
        scanner = env.getRegion().getScanner(scan);
        List<TimeRange> timeRanges = getAllTimeRanges(scan, request);
        byte[] colFamily = scan.getFamilies()[0];
        List<Cell> results = new ArrayList<Cell>();
        boolean hasMoreRows = false;
        do {
            results.clear();
            hasMoreRows = scanner.next(results);
            for (Cell kv : results) {
                long timestamp = 0;
                if (hasScannerRange)
                    timestamp = kv.getTimestamp();
                else
                    timestamp = getMillisTimestampFromOffset(getTimestampFromRowKeyAsMillis(kv, request),
                            Bytes.toInt(kv.getQualifier()));
                for (TimeRange t : timeRanges) {
                    if (t.withinTimeRange(timestamp)) {
                        long minTimestamp = t.getMin();
                        if (averages.containsKey(minTimestamp)) {
                            sumVal = averages.get(minTimestamp).getValue();
                            kvCountVal = averages.get(minTimestamp).getKey();
                        } else {
                            sumVal = null;
                            kvCountVal = 0l;
                        }
                        temp = ci.getValue(colFamily, kv.getQualifier(), kv);
                        if (temp != null) {
                            kvCountVal++;
                            sumVal = ci.add(sumVal, ci.castToReturnType(temp));
                            averages.put(t.getMin(), new AbstractMap.SimpleEntry<Long, S>(kvCountVal, sumVal));
                        }
                    }
                }
            }
        } while (hasMoreRows);
        if (!averages.isEmpty()) {
            TimeseriesAggregateResponse.Builder responseBuilder = TimeseriesAggregateResponse.newBuilder();
            for (Entry<Long, SimpleEntry<Long, S>> entry : averages.entrySet()) {
                TimeseriesAggregateResponseEntry.Builder valueBuilder = TimeseriesAggregateResponseEntry
                        .newBuilder();
                TimeseriesAggregateResponseMapEntry.Builder mapElementBuilder = TimeseriesAggregateResponseMapEntry
                        .newBuilder();
                ByteString first = ci.getProtoForPromotedType(entry.getValue().getValue()).toByteString();
                valueBuilder.addFirstPart(first);
                // putLong() leaves the position at 8; rewind so copyFrom() sees the whole value
                ByteBuffer bb = ByteBuffer.allocate(8).putLong(entry.getValue().getKey());
                bb.rewind();
                valueBuilder.setSecondPart(ByteString.copyFrom(bb));
                mapElementBuilder.setKey(entry.getKey());
                mapElementBuilder.setValue(valueBuilder.build());
                responseBuilder.addEntry(mapElementBuilder.build());
            }
            response = responseBuilder.build();
        }
    } catch (IOException e) {
        ResponseConverter.setControllerException(controller, e);
    } finally {
        if (scanner != null) {
            try {
                scanner.close();
            } catch (IOException ignored) {
            }
        }
    }
    log.info("Averages from this region are " + env.getRegion().getRegionNameAsString() + ": "
            + averages.toString());
    done.run(response);
}
From source file:org.apache.james.protocols.smtp.core.esmtp.MailSizeEsmtpExtension.java
/**
 * @see org.apache.james.protocols.smtp.core.DataLineFilter#onLine(SMTPSession, byte[], LineHandler)
 */
public Response onLine(SMTPSession session, ByteBuffer line, LineHandler<SMTPSession> next) {
    Response response = null;
    Boolean failed = (Boolean) session.getAttachment(MESG_FAILED, State.Transaction);
    // If we already defined we failed and sent a reply we should simply
    // wait for a CRLF.CRLF to be sent by the client.
    if (failed != null && failed.booleanValue()) {
        // TODO
    } else {
        if (line.remaining() == 3 && line.get() == 46) {
            // the line is ".\r\n" (46 == '.'): rewind after peeking at the first byte
            line.rewind();
            response = next.onLine(session, line);
        } else {
            line.rewind();
            Long currentSize = (Long) session.getAttachment("CURRENT_SIZE", State.Transaction);
            Long newSize;
            if (currentSize == null) {
                newSize = Long.valueOf(line.remaining());
            } else {
                newSize = Long.valueOf(currentSize.intValue() + line.remaining());
            }
            if (session.getConfiguration().getMaxMessageSize() > 0
                    && newSize.intValue() > session.getConfiguration().getMaxMessageSize()) {
                // Add an item to the state to suppress logging of extra lines of data
                // that are sent after the size limit has been hit.
                session.setAttachment(MESG_FAILED, Boolean.TRUE, State.Transaction);
                // then let the client know that the size limit has been hit
                response = next.onLine(session, ByteBuffer.wrap(".\r\n".getBytes()));
            } else {
                line.rewind();
                response = next.onLine(session, line);
            }
            session.setAttachment("CURRENT_SIZE", newSize, State.Transaction);
        }
    }
    return response;
}
From source file:org.apache.hadoop.hdfs.hoss.db.FileBlockStore.java
/**
 * Read a block from the file.
 *
 * @param index of the block
 * @return ByteBuffer from the pool with the block's data
 */
public ByteBuffer get(final int index) {
    if (!validState)
        throw new InvalidStateException();
    if (LOG.isDebugEnabled())
        LOG.debug("get(" + index + ")");
    try {
        if (useMmap) {
            final MappedByteBuffer mbb = getMmapForIndex(index, true);
            if (mbb != null) {
                return mbb;
            }
            // fall back to the RandomAccessFile channel
        }
        final ByteBuffer buf = bufstack.pop();
        fileChannel.position(index * blockSize).read(buf);
        // rewind so the caller reads from the beginning of the block
        buf.rewind();
        return buf;
    } catch (Exception e) {
        LOG.error("Exception in get(" + index + ")", e);
    }
    return null;
}
From source file:org.apache.usergrid.security.tokens.cassandra.TokenServiceImpl.java
private UUID getUUIDForToken(String token) throws ExpiredTokenException, BadTokenException {
    TokenCategory tokenCategory = TokenCategory.getFromBase64String(token);
    byte[] bytes = decodeBase64(token.substring(TokenCategory.BASE64_PREFIX_LENGTH));
    UUID uuid = uuid(bytes);
    int i = 16;
    long expires = Long.MAX_VALUE;
    if (tokenCategory.getExpires()) {
        expires = ByteBuffer.wrap(bytes, i, 8).getLong();
        i = 24;
    }
    ByteBuffer expected = ByteBuffer.allocate(20);
    expected.put(sha(tokenCategory.getPrefix() + uuid + tokenSecretSalt + expires));
    // rewind so equals() compares the 20 digest bytes rather than an empty remainder
    expected.rewind();
    ByteBuffer signature = ByteBuffer.wrap(bytes, i, 20);
    if (!signature.equals(expected)) {
        throw new BadTokenException("Invalid token signature");
    }
    long expirationDelta = System.currentTimeMillis() - expires;
    if (expires != Long.MAX_VALUE && expirationDelta > 0) {
        throw new ExpiredTokenException(String.format("Token expired %d milliseconds ago.", expirationDelta));
    }
    return uuid;
}
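In the example above, expected.rewind() is what makes the signature comparison work: ByteBuffer.equals() compares only the bytes remaining between position and limit, and right after put() the expected buffer has nothing remaining. A small standalone sketch of that behaviour (not part of the Usergrid code):

import java.nio.ByteBuffer;
import java.util.Arrays;

public class EqualsAfterRewind {
    public static void main(String[] args) {
        byte[] digest = new byte[20];
        Arrays.fill(digest, (byte) 1);

        ByteBuffer expected = ByteBuffer.allocate(20);
        expected.put(digest);                            // position = 20, remaining() == 0

        ByteBuffer signature = ByteBuffer.wrap(digest);  // position = 0, remaining() == 20

        System.out.println(signature.equals(expected));  // false -- nothing left to compare in 'expected'
        expected.rewind();                                // position = 0, remaining() == 20
        System.out.println(signature.equals(expected));  // true
    }
}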
From source file:com.yobidrive.diskmap.needles.Needle.java
public boolean getNeedleHeaderFromBuffer(ByteBuffer input) throws Exception {
    try {
        // Reinit needle
        keyBytes = null;
        version = null;
        flags = 0x00;
        size = 0;
        data = null;
        previousNeedle = null; // Chaining
        readBytes = 0;
        // Process the buffer from the start
        input.rewind();
        int startPosition = input.position();
        int magic = input.getInt();
        if (magic == MAGICSTART_BADENDIAN) {
            // wrong byte order: flip the buffer's endianness and continue
            if (input.order().equals(ByteOrder.BIG_ENDIAN))
                input.order(ByteOrder.LITTLE_ENDIAN);
            else
                input.order(ByteOrder.BIG_ENDIAN);
        } else if (magic != MAGICSTART) {
            logger.error("Buffer not starting with needle");
            return false;
        }
        needleNumber = input.getLong();
        flags = input.get();
        int keyLen = input.getInt();
        if (keyLen > 2028) {
            logger.error("Crazy needle key len");
            return false;
        }
        keyBytes = new byte[keyLen];
        input.get(keyBytes);
        int versionLen = input.getInt();
        if (versionLen > 1024 * 16) {
            logger.error("Crazy needle version len");
            return false;
        }
        if (versionLen == 0)
            version = null;
        else {
            byte[] versionBytes = new byte[versionLen];
            input.get(versionBytes);
            version = new VectorClock(versionBytes);
        }
        int previousLogNumber = input.getInt(); // Chaining
        long previousNeedleOffset = input.getLong(); // Chaining
        if (previousLogNumber != -1 && previousNeedleOffset != -1L) {
            previousNeedle = new NeedlePointer();
            previousNeedle.setNeedleFileNumber(previousLogNumber);
            previousNeedle.setNeedleOffset(previousNeedleOffset);
        }
        originalFileNumber = input.getInt(); // Original needle location (for cleaning)
        originalSize = input.getInt(); // Original needle size (for cleaning)
        size = input.getInt();
        readBytes = input.position() - startPosition;
        // rewind again so the caller can re-read the whole header
        input.rewind();
        // input.mark();
        return true;
    } catch (BufferUnderflowException bue) {
        return false;
    }
}
From source file:voldemort.store.cachestore.impl.ChannelStore.java
private boolean checkSignature(FileChannel channel) throws IOException {
    ByteBuffer intBytes = ByteBuffer.allocate(OFFSET);
    if (channel.size() == 0) {
        intBytes.putInt(MAGIC);
        intBytes.flip();
        channel.write(intBytes);
    } else {
        channel.read(intBytes);
        intBytes.rewind();
        int s = intBytes.getInt();
        if (s != MAGIC)
            throw new StoreException("Header mismatch expect " + Integer.toHexString(MAGIC) + " read "
                    + Integer.toHexString(s));
    }
    return true;
}
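The checkSignature example uses flip() on the write path and rewind() on the read path. The difference, per the java.nio.Buffer contract: flip() sets the limit to the current position before resetting the position to zero (typical after filling a buffer you are about to drain), while rewind() resets the position and leaves the limit untouched (typical before re-reading data whose limit is already correct, such as a buffer sized exactly for a channel read). A small standalone sketch:

import java.nio.ByteBuffer;

public class FlipVsRewind {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(8);
        buf.putInt(0xCAFEBABE);                   // position = 4, limit = 8

        ByteBuffer flipped = buf.duplicate();      // independent position/limit, shared content
        flipped.flip();                            // position = 0, limit = 4
        System.out.println(flipped.remaining());   // 4 -- only the written bytes

        buf.rewind();                              // position = 0, limit still 8
        System.out.println(buf.remaining());       // 8 -- includes the unwritten tail
    }
}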
From source file:org.apache.usergrid.persistence.Schema.java
public static ByteBuffer encrypt(ByteBuffer clear) {
    if (clear == null || !clear.hasRemaining()) {
        return clear;
    }
    try {
        SecretKeySpec sKeySpec = new SecretKeySpec(getRawKey(encryptionSeed), "AES");
        Cipher cipher = Cipher.getInstance("AES");
        cipher.init(Cipher.ENCRYPT_MODE, sKeySpec);
        ByteBuffer encrypted = ByteBuffer.allocate(cipher.getOutputSize(clear.remaining()));
        cipher.doFinal(clear, encrypted);
        encrypted.rewind();
        return encrypted;
    } catch (Exception e) {
        throw new IllegalStateException(e);
    }
}