List of usage examples for java.nio.ByteBuffer.flip()
public final Buffer flip()
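flip() switches a buffer from filling to draining: it sets the limit to the current position and resets the position to zero (discarding the mark). The typical cycle is put (or channel.read) into the buffer, flip(), then get (or channel.write) out of it, as in all of the examples below. A minimal self-contained sketch of that cycle (illustrative only, not taken from the sources below):

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class FlipDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(64);
        buf.put("hello".getBytes(StandardCharsets.UTF_8)); // fill: position advances to 5
        buf.flip();                                        // limit = 5, position = 0
        byte[] out = new byte[buf.remaining()];
        buf.get(out);                                      // drain exactly the bytes just written
        System.out.println(new String(out, StandardCharsets.UTF_8)); // prints "hello"
    }
}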
From source file:org.bytesoft.bytetcc.work.CleanupWork.java
private void invokeForget(Xid xid, String resource, ByteBuffer buffer) throws IllegalStateException {
    try {
        this.lock.lock();
        int position = buffer.capacity() + this.endIndex;
        if (position > this.sizeOfRaf) {
            try {
                this.raf.setLength(position);
                this.sizeOfRaf = position;
            } catch (IOException ex) {
                throw new IllegalStateException(ex.getMessage());
            }
        }
        try {
            this.channel.position(this.endIndex);
            buffer.flip();
            this.channel.write(buffer);
        } catch (Exception ex) {
            throw new IllegalStateException(ex.getMessage());
        }

        int current = this.endIndex;
        this.endIndex = position;
        this.header.position(IDENTIFIER.length + 2 + 4);
        this.header.putInt(position);

        Record record = new Record();
        record.resource = resource;
        record.xid = xid;
        record.startIndex = current;
        // this.recordList.add(record);
        List<Record> recordList = this.recordMap.get(record.resource);
        if (recordList == null) {
            recordList = new ArrayList<Record>();
            this.recordMap.put(record.resource, recordList);
        }
        recordList.add(record);

        this.condition.signalAll();
    } finally {
        this.lock.unlock();
    }
}
From source file:net.cellcloud.talk.stuff.PrimitiveSerializer.java
public static void read(Primitive primitive, InputStream stream) {
    /* Wire format:
       [version]{stuff}...{stuff}[dialect@tracker]
       e.g. [01.00]{sub=cloud:string}{pre=add:string}[Action@Ambrose] */
    try {
        byte phase = PARSE_PHASE_UNKNOWN;
        int read = 0;
        ByteBuffer buf = ByteBuffer.allocate(BLOCK);
        byte[] type = new byte[3];
        byte[] value = null;
        byte[] literal = null;
        int length = 0;

        while ((read = stream.read()) >= 0) {
            switch (phase) {
            case PARSE_PHASE_VALUE:
                if (read == '\\') {
                    // escape sequence: unescape structural tokens, pass everything else through
                    int next = stream.read();
                    if (next == TOKEN_OPEN_BRACE || next == TOKEN_CLOSE_BRACE
                            || next == TOKEN_OPERATE_ASSIGN || next == TOKEN_OPERATE_DECLARE) {
                        buf.put((byte) next);
                        ++length;
                    } else {
                        buf.put((byte) read);
                        buf.put((byte) next);
                        length += 2;
                    }
                    continue;
                }
                if (read == TOKEN_OPERATE_DECLARE) {
                    // end of value: flip to drain the bytes accumulated so far
                    buf.flip();
                    value = new byte[length];
                    buf.get(value, 0, length);
                    buf.clear();
                    phase = PARSE_PHASE_LITERAL;
                    length = 0;
                    continue;
                }
                buf.put((byte) read);
                ++length;
                break;
            case PARSE_PHASE_TYPE:
                if (read == TOKEN_OPERATE_ASSIGN) {
                    // end of type
                    buf.flip();
                    buf.get(type);
                    buf.clear();
                    phase = PARSE_PHASE_VALUE;
                    length = 0;
                    continue;
                }
                buf.put((byte) read);
                break;
            case PARSE_PHASE_LITERAL:
                if (read == TOKEN_CLOSE_BRACE) {
                    // end of literal: inject the completed stuff into the primitive
                    buf.flip();
                    literal = new byte[length];
                    buf.get(literal, 0, length);
                    buf.clear();
                    injectStuff(primitive, type, value, literal);
                    phase = PARSE_PHASE_DIALECT;
                    length = 0;
                    continue;
                }
                buf.put((byte) read);
                ++length;
                break;
            case PARSE_PHASE_STUFF:
                if (read == TOKEN_OPEN_BRACE) {
                    phase = PARSE_PHASE_TYPE;
                    buf.clear();
                }
                break;
            case PARSE_PHASE_VERSION:
                if (read == TOKEN_CLOSE_BRACKET) {
                    // end of version
                    phase = PARSE_PHASE_STUFF;
                    continue;
                }
                buf.put((byte) read);
                break;
            case PARSE_PHASE_DIALECT:
                if (read == TOKEN_OPEN_BRACE) {
                    phase = PARSE_PHASE_TYPE;
                    buf.clear();
                } else if (read == TOKEN_OPEN_BRACKET) {
                    buf.clear();
                } else if (read == TOKEN_CLOSE_BRACKET) {
                    // end of dialect section
                    deserializeDialect(primitive,
                            new String(buf.array(), 0, length, Charset.forName("UTF-8")));
                } else {
                    buf.put((byte) read);
                    ++length;
                }
                break;
            default:
                if (read == TOKEN_OPEN_BRACE) {
                    phase = PARSE_PHASE_TYPE;
                    buf.clear();
                } else if (read == TOKEN_OPEN_BRACKET) {
                    phase = PARSE_PHASE_VERSION;
                    buf.clear();
                }
                break;
            }
        }
        buf.clear();
    } catch (IOException e) {
        Logger.log(PrimitiveSerializer.class, e, LogLevel.ERROR);
    }
}
From source file:com.offbynull.portmapper.common.UdpCommunicator.java
@Override
protected void run() throws Exception {
    ByteBuffer recvBuffer = ByteBuffer.allocate(1100);

    while (true) {
        selector.select();

        if (stopFlag) {
            return;
        }

        for (DatagramChannel channel : sendQueue.keySet()) {
            if (!sendQueue.get(channel).isEmpty()) {
                channel.register(selector, SelectionKey.OP_READ | SelectionKey.OP_WRITE);
            } else {
                channel.register(selector, SelectionKey.OP_READ);
            }
        }

        for (SelectionKey key : selector.selectedKeys()) {
            if (!key.isValid()) {
                continue;
            }

            DatagramChannel channel = (DatagramChannel) key.channel();

            if (key.isReadable()) {
                recvBuffer.clear();
                InetSocketAddress incomingAddress = (InetSocketAddress) channel.receive(recvBuffer);
                recvBuffer.flip();

                for (UdpCommunicatorListener listener : listeners) {
                    try {
                        listener.incomingPacket(incomingAddress, channel, recvBuffer.asReadOnlyBuffer());
                    } catch (RuntimeException re) { // NOPMD
                        // do nothing
                    }
                }
            } else if (key.isWritable()) {
                LinkedBlockingQueue<ImmutablePair<InetSocketAddress, ByteBuffer>> queue =
                        sendQueue.get(channel);
                ImmutablePair<InetSocketAddress, ByteBuffer> next = queue.poll();

                if (next != null) {
                    try {
                        channel.send(next.getValue(), next.getKey());
                    } catch (RuntimeException re) { // NOPMD
                        // do nothing
                    }
                }
            }
        }
    }
}
From source file:org.alfresco.contentstore.CassandraContentStore.java
private ByteBuffer getBlock(long globalBlockId) {
    ByteBuffer bb = null;

    ResultSet rs = cassandraSession.getCassandraSession().execute(getBlockStatement.bind(globalBlockId));
    Row row = rs.one();
    if (row != null) {
        bb = row.getBytes("data");
        bb.compact();
        bb.flip();
    }

    return bb;
}
From source file:org.atricore.idbus.idojos.ldapidentitystore.LDAPIdentityStore.java
/**
 * Fetch the Ldap user attributes to be used as credentials.
 *
 * @param uid the user id for whom credentials are required
 * @return the hash map containing user credentials as name/value pairs
 * @throws NamingException LDAP error obtaining user credentials.
 */
protected HashMap selectCredentials(String uid) throws NamingException {
    HashMap credentialResultSet = new HashMap();
    InitialLdapContext ctx = createLdapInitialContext();

    String principalUidAttrName = this.getPrincipalUidAttributeID();
    String usersCtxDN = this.getUsersCtxDN();

    // BasicAttributes matchAttrs = new BasicAttributes(true);
    // matchAttrs.put(principalUidAttrName, uid);

    String credentialQueryString = getCredentialQueryString();
    HashMap credentialQueryMap = parseQueryString(credentialQueryString);

    Iterator i = credentialQueryMap.keySet().iterator();
    List credentialAttrList = new ArrayList();
    while (i.hasNext()) {
        String o = (String) i.next();
        credentialAttrList.add(o);
    }

    String[] credentialAttr = (String[]) credentialAttrList.toArray(new String[credentialAttrList.size()]);

    try {
        // NamingEnumeration answer = ctx.search(usersCtxDN, matchAttrs, credentialAttr);
        // This gives more control over search behavior:
        NamingEnumeration answer = ctx.search(usersCtxDN,
                "(&(" + principalUidAttrName + "=" + uid + "))", getSearchControls());

        while (answer.hasMore()) {
            SearchResult sr = (SearchResult) answer.next();
            Attributes attrs = sr.getAttributes();

            for (int j = 0; j < credentialAttr.length; j++) {
                Object credentialObject = attrs.get(credentialAttr[j]).get();
                String credentialName = (String) credentialQueryMap.get(credentialAttr[j]);
                String credentialValue = null;

                if (logger.isDebugEnabled())
                    logger.debug("Found user credential '" + credentialName + "' of type '"
                            + credentialObject.getClass().getName() + ""
                            + (credentialObject.getClass().isArray()
                                    ? "[" + Array.getLength(credentialObject) + "]"
                                    : "")
                            + "'");

                // If the attribute value is an array, cast it to byte[] and then convert to
                // String using the proper encoding.
                if (credentialObject.getClass().isArray()) {
                    try {
                        // Try to create a UTF-8 String; we use java.nio to handle errors in a better way.
                        // If the byte[] cannot be converted to UTF-8, we use the credentialObject as is.
                        byte[] credentialData = (byte[]) credentialObject;
                        ByteBuffer in = ByteBuffer.allocate(credentialData.length);
                        in.put(credentialData);
                        in.flip();

                        Charset charset = Charset.forName("UTF-8");
                        CharsetDecoder decoder = charset.newDecoder();
                        CharBuffer charBuffer = decoder.decode(in);
                        credentialValue = charBuffer.toString();
                    } catch (CharacterCodingException e) {
                        if (logger.isDebugEnabled())
                            logger.debug("Can't convert credential value to String using UTF-8");
                    }
                } else if (credentialObject instanceof String) {
                    // The credential value must be a String ...
                    credentialValue = (String) credentialObject;
                }

                // Check what we have ...
                if (credentialValue != null) {
                    // Remove any schema information from the credential value,
                    // like the {md5} prefix for passwords.
                    credentialValue = getSchemeFreeValue(credentialValue);
                    credentialResultSet.put(credentialName, credentialValue);
                } else {
                    // We have a binary credential; leave it as it is.
                    credentialResultSet.put(credentialName, credentialObject);
                }

                if (logger.isDebugEnabled())
                    logger.debug("Found user credential '" + credentialName + "' with value '"
                            + (credentialValue != null ? credentialValue : credentialObject) + "'");
            }
        }
    } catch (NamingException e) {
        if (logger.isDebugEnabled())
            logger.debug("Failed to locate user", e);
    } finally {
        // Close the context to release the connection
        ctx.close();
    }

    return credentialResultSet;
}
From source file:edu.uci.ics.crawler4j.crawler.fetcher.PageFetcher.java
private boolean loadPage(final Page p, final InputStream in, final int totalsize,
        final boolean isBinary, String encoding) {
    ByteBuffer bBuf;

    if (totalsize > 0) {
        bBuf = ByteBuffer.allocate(totalsize + 1024);
    } else {
        bBuf = ByteBuffer.allocate(maxDownloadSize);
    }

    final byte[] b = new byte[1024];
    int len;
    double finished = 0;
    try {
        while ((len = in.read(b)) != -1) {
            if (finished + b.length > bBuf.capacity()) {
                break;
            }
            bBuf.put(b, 0, len);
            finished += len;
        }
    } catch (final BufferOverflowException boe) {
        System.out.println("Page size exceeds maximum allowed.");
        return false;
    } catch (final Exception e) {
        System.err.println(e.getMessage());
        return false;
    }

    bBuf.flip();

    if (isBinary) {
        byte[] tmp = new byte[bBuf.limit()];
        bBuf.get(tmp);
        p.setBinaryData(tmp);
    } else {
        String html = "";
        if (encoding == null) {
            int pos = bBuf.position();
            html = Charset.forName("US-ASCII").decode(bBuf).toString();
            bBuf.position(pos);
            pos = html.toLowerCase().indexOf("<meta http-equiv=\"content-type\" content=\"");
            if (pos >= 0) {
                int end = html.indexOf("\"", pos + 41);
                if (end >= 0) {
                    String content = html.substring(pos, end);
                    if (content.contains("charset=")) {
                        encoding = content.substring(content.indexOf("charset=") + 8);
                    }
                }
            }
        }
        if (encoding == null || !Charset.isSupported(encoding))
            encoding = "UTF-8";
        if (!encoding.equals("UTF-8")) {
            html = Charset.forName(encoding).decode(bBuf).toString();
        }
        if (html.length() == 0) {
            return false;
        }
        p.setHTML(html);
    }
    return true;
}
From source file:de.fhg.fokus.diameter.DiameterPeer.transport.Communicator.java
public void run() {
    MessageInfo messageInfo = null;
    ByteBuffer receiveByteBuffer = ByteBuffer.allocateDirect(MAX_MESSAGE_LENGTH);
    DiameterMessage msg = null;
    byte[] buffer = null;
    int len = 0;
    // handler to keep track of association setup and termination
    AssociationHandler assocHandler = new AssociationHandler();
    try {
        while (this.running) {
            messageInfo = sctpChannel.receive(receiveByteBuffer, System.out, assocHandler);
            log.debug("Received msg from communicator:" + this + " and sctpChannel:" + sctpChannel);
            log.debug("Received msg's length:" + messageInfo.bytes());
            log.error("Received msg's length:" + messageInfo.bytes());

            receiveByteBuffer.flip();

            if (receiveByteBuffer.remaining() > 0) {
                buffer = new byte[messageInfo.bytes()];
                receiveByteBuffer.get(buffer);
                receiveByteBuffer.clear();
                // log.debug("The origin message stream is:\n" + CommonMethod.byteToHex(buffer));

                // first we check the version
                if (buffer[0] != 1) {
                    log.error("Expecting diameter version 1, received version " + buffer[0]);
                    continue;
                }
                // then we check the length of the message
                len = ((int) buffer[1] & 0xFF) << 16 | ((int) buffer[2] & 0xFF) << 8
                        | ((int) buffer[3] & 0xFF);
                if (len > MAX_MESSAGE_LENGTH) {
                    log.error("Message too long (msg length:" + len + " > max buffer length:"
                            + MAX_MESSAGE_LENGTH + ").");
                    continue;
                }
                // now we can decode the message
                try {
                    msg = Codec.decodeDiameterMessage(buffer, 0);
                } catch (DiameterMessageDecodeException e) {
                    log.error("Error decoding diameter message !");
                    log.error(e, e);
                    msg = null;
                    continue;
                }
                msg.networkTime = System.currentTimeMillis();
                log.debug("Received message is:\n" + msg);
                if (this.peer != null) {
                    this.peer.refreshTimer();
                }
                processMessage(msg);
            }
            msg = null;
        }
    } catch (Exception e1) {
        log.error("Exception:" + e1.getCause() + " caught in communicator:" + this
                + " and running flag=" + running);
        if (this.running) {
            if (this.peer != null) {
                if (this.peer.I_comm == this) {
                    StateMachine.process(this.peer, StateMachine.I_Peer_Disc);
                }
                if (this.peer.R_comm == this) {
                    log.error("Now closing the peer:" + this.peer);
                    StateMachine.process(this.peer, StateMachine.R_Peer_Disc);
                }
            }
            log.error("Error reading from sctpChannel:" + sctpChannel + ", the channel might be closed.");
        }
        /* else it was a shutdown request, it's normal */
    }
    log.debug("Now closing communicator:" + this + ", and its sctpChannel:" + sctpChannel);
    this.running = false;
    try {
        sctpChannel.close();
    } catch (IOException e) {
        log.error("Error closing sctpChannel !");
        log.error(e, e);
    }
}
From source file:org.alfresco.contentstore.patch.PatchServiceImpl.java
private void updatePatchDocument(PatchDocument patchDocument, NodeChecksums checksums, Reader reader)
        throws IOException {
    ByteBuffer data = ByteBuffer.allocate(blockSize * 20);

    int blockSize = checksums.getBlockSize();

    int i = 0;

    Adler32 adlerInfo = new Adler32(hasher);
    int lastMatchIndex = 1; // starts at 1

    ByteBuffer currentPatch = ByteBuffer.allocate(5000000); // TODO

    int x = 0;

    for (;;) {
        if (x == 0 || i >= data.limit()) {
            data.clear();
            i = 0;

            int numRead = reader.read(data);
            if (numRead <= 0) {
                break;
            }
            data.flip();
            x += numRead;
        }

        int chunkSize = 0;
        // determine the size of the next data chunk to evaluate. Default to
        // blockSize, but clamp to end of data
        if ((i + blockSize) > data.limit()) {
            chunkSize = data.limit() - i;
            adlerInfo.reset(); // need to reset this because the rolling
                               // checksum doesn't work correctly on a final
                               // non-aligned block
        } else {
            chunkSize = blockSize;
        }

        int end = i + chunkSize - 1;

        int matchedBlockIndex = adlerInfo.checkMatch(lastMatchIndex, checksums, data, i, end);
        if (matchedBlockIndex != -1) {
            // if we have a match, do the following:
            // 1) add the matched block index to our tracking buffer
            // 2) check to see if there's a current patch. If so, add it to
            //    the patch document.
            // 3) jump forward blockSize bytes and continue
            patchDocument.addMatchedBlock(matchedBlockIndex);

            if (currentPatch.position() > 0) {
                // there are outstanding patches, add them to the list:
                // create the patch and append it to the patches buffer
                currentPatch.flip();
                int size = currentPatch.limit();
                byte[] dst = new byte[size];
                currentPatch.get(dst, 0, size);
                Patch patch = new Patch(lastMatchIndex, size, dst);
                patchDocument.addPatch(patch);
                currentPatch.clear();
            }

            lastMatchIndex = matchedBlockIndex;
            i += chunkSize;
            adlerInfo.reset();
        } else {
            // while we don't have a block match, append bytes to the
            // current patch
            currentPatch.put(data.get(i));
            i++;
        }
    } // end for each byte in the data

    if (currentPatch.position() > 0) {
        currentPatch.flip();
        int size = currentPatch.limit();
        byte[] dst = new byte[size];
        currentPatch.get(dst, 0, size);
        Patch patch = new Patch(lastMatchIndex, size, dst);
        patchDocument.addPatch(patch);
    }
}
From source file:com.koda.integ.hbase.blockcache.OffHeapBlockCacheOld.java
/**
 * Read external with codec.
 *
 * @param blockName the block name
 * @return the cacheable
 * @throws IOException Signals that an I/O exception has occurred.
 */
private Cacheable readExternalWithCodec(String blockName) throws IOException {
    if (overflowExtEnabled == false)
        return null;
    // Check if we already have this block in the external storage cache
    try {
        // We use a 16-byte hash for the external storage cache
        byte[] hashed = Utils.hash128(blockName);
        StorageHandle handle = (StorageHandle) extStorageCache.get(hashed);
        if (handle == null)
            return null;
        ByteBuffer buffer = extStorageCache.getLocalBufferWithAddress().getBuffer();
        SerDe serde = extStorageCache.getSerDe();
        @SuppressWarnings("unused")
        Codec codec = extStorageCache.getCompressionCodec();

        buffer.clear();

        StorageHandle newHandle = storage.getData(handle, buffer);
        if (buffer.position() > 0)
            buffer.flip();
        int size = buffer.getInt();
        if (size == 0)
            return null;
        // Skip key
        int keySize = buffer.getInt();
        buffer.position(8 + keySize);
        boolean inMemory = buffer.get() == (byte) 1;

        //buffer.position(5);
        buffer.limit(size + 4);
        Cacheable obj = (Cacheable) serde.readCompressed(buffer/*, codec*/);
        if (inMemory) {
            permGenCache.put(blockName, obj);
        } else {
            tenGenCache.put(blockName, obj);
        }
        if (newHandle.equals(handle) == false) {
            extStorageCache.put(hashed, newHandle);
        }
        return obj;
    } catch (NativeMemoryException e) {
        throw new IOException(e);
    }
}
From source file:hivemall.mf.OnlineMatrixFactorizationUDTF.java
protected final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer inputBuf = this.inputBuf;
    final NioFixedSegment fileIO = this.fileIO;
    assert (inputBuf != null);
    assert (fileIO != null);
    final long numTrainingExamples = count;

    final Reporter reporter = getReporter();
    final Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.mf.MatrixFactorization$Counter", "iteration");

    try {
        if (lastWritePos == 0) { // run iterations w/o temporary file
            if (inputBuf.position() == 0) {
                return; // no training example
            }
            inputBuf.flip();

            int iter = 2;
            for (; iter <= iterations; iter++) {
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (inputBuf.remaining() > 0) {
                    int user = inputBuf.getInt();
                    int item = inputBuf.getInt();
                    double rating = inputBuf.getDouble();
                    // invoke train
                    count++;
                    train(user, item, rating);
                }
                cvState.multiplyLoss(0.5d);
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                inputBuf.rewind();
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples on memory (thus " + NumberUtils.formatNumber(count)
                    + " training updates in total)");
        } else { // read training examples from the temporary file and invoke train for each example
            // write training examples in buffer to a temporary file
            if (inputBuf.position() > 0) {
                writeBuffer(inputBuf, fileIO, lastWritePos);
            } else if (lastWritePos == 0) {
                return; // no training example
            }
            try {
                fileIO.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + fileIO.getFile().getAbsolutePath(), e);
            }
            if (logger.isInfoEnabled()) {
                File tmpFile = fileIO.getFile();
                logger.info("Wrote " + numTrainingExamples
                        + " records to a temporary file for iterative training: "
                        + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            for (; iter <= iterations; iter++) {
                setCounterValue(iterCounter, iter);

                inputBuf.clear();
                long seekPos = 0L;
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // reads training examples from the temporary file into the buffer
                    final int bytesRead;
                    try {
                        bytesRead = fileIO.read(seekPos, inputBuf);
                    } catch (IOException e) {
                        throw new HiveException(
                                "Failed to read a file: " + fileIO.getFile().getAbsolutePath(), e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;
                    seekPos += bytesRead;

                    // reads training examples from the buffer
                    inputBuf.flip();
                    int remain = inputBuf.remaining();
                    assert (remain > 0) : remain;
                    for (; remain >= RECORD_BYTES; remain -= RECORD_BYTES) {
                        int user = inputBuf.getInt();
                        int item = inputBuf.getInt();
                        double rating = inputBuf.getDouble();
                        // invoke train
                        count++;
                        train(user, item, rating);
                    }
                    inputBuf.compact();
                }
                cvState.multiplyLoss(0.5d);
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples using a secondary storage (thus "
                    + NumberUtils.formatNumber(count) + " training updates in total)");
        }
    } finally {
        // delete the temporary file and release resources
        try {
            fileIO.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + fileIO.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}
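The last two examples pair flip() with compact() to stream fixed-size records: flip() drains the complete records read so far, then compact() moves any trailing partial record to the front of the buffer and resumes filling behind it. A minimal sketch of that loop, assuming 16-byte records (two ints plus a double) as in the hivemall example; process() is a hypothetical placeholder, not part of any source above:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;

public final class RecordDrain {
    private static final int RECORD_BYTES = 4 + 4 + 8; // user, item, rating

    static void drain(ReadableByteChannel in) throws IOException {
        ByteBuffer buf = ByteBuffer.allocate(4096);
        while (in.read(buf) > 0) {
            buf.flip();                               // switch from filling to draining
            while (buf.remaining() >= RECORD_BYTES) { // consume only complete records
                int user = buf.getInt();
                int item = buf.getInt();
                double rating = buf.getDouble();
                process(user, item, rating);
            }
            buf.compact();                            // keep the partial record, resume filling
        }
    }

    private static void process(int user, int item, double rating) {
        // hypothetical sink for each decoded record
    }
}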