Usage examples for java.nio.ByteBuffer#putInt(int)
public abstract ByteBuffer putInt(int value);
From source file:edu.umass.cs.reconfiguration.reconfigurationpackets.ReconfigurationPacket.java
@Override
public byte[] toBytes() {
    // Serializes this packet as its string form encoded with the project-wide
    // CHARSET. When BYTEIFICATION is enabled, the encoded body is prefixed
    // with a 4-byte big-endian packet-type header so a receiver can
    // demultiplex without parsing the payload.
    byte[] body = null;
    try {
        body = this.toString().getBytes(CHARSET);
    } catch (UnsupportedEncodingException e) {
        // NOTE(review): the exception is swallowed and null returned, forcing
        // every caller to null-check; consider propagating a runtime exception
        // instead (CHARSET is presumably a supported encoding, so this branch
        // should be unreachable — TODO confirm).
        e.printStackTrace();
        return null;
    }
    if (!BYTEIFICATION)
        return body;
    // else: prepend the 4-byte type header.
    byte[] bytes = new byte[body.length + 4];
    ByteBuffer bbuf = ByteBuffer.wrap(bytes);
    bbuf.putInt(this.getType().getInt());
    bbuf.put(body);
    return bytes;
}
From source file:org.wso2.carbon.databridge.receiver.binary.internal.BinaryDataReceiver.java
private String processMessage(int messageType, byte[] message, OutputStream outputStream) { ByteBuffer byteBuffer = ByteBuffer.wrap(message); int sessionIdLength; String sessionId;//from w w w .j ava 2s . c om switch (messageType) { case 0: //Login int userNameLength = byteBuffer.getInt(); int passwordLength = byteBuffer.getInt(); String userName = new String(message, 8, userNameLength); String password = new String(message, 8 + userNameLength, passwordLength); try { sessionId = dataBridgeReceiverService.login(userName, password); ByteBuffer buffer = ByteBuffer.allocate(5 + sessionId.length()); buffer.put((byte) 2); buffer.putInt(sessionId.length()); buffer.put(sessionId.getBytes(BinaryMessageConstants.DEFAULT_CHARSET)); outputStream.write(buffer.array()); outputStream.flush(); } catch (Exception e) { try { sendError(e, outputStream); } catch (IOException e1) { log.error("Error while sending response for login message: " + e1.getMessage(), e1); } } break; case 1://Logout sessionIdLength = byteBuffer.getInt(); sessionId = new String(message, 4, sessionIdLength); try { dataBridgeReceiverService.logout(sessionId); outputStream.write((byte) 0); outputStream.flush(); } catch (Exception e) { try { sendError(e, outputStream); } catch (IOException e1) { log.error("Error while sending response for login message: " + e1.getMessage(), e1); } } break; case 2: //Publish sessionIdLength = byteBuffer.getInt(); sessionId = new String(message, 4, sessionIdLength); try { dataBridgeReceiverService.publish(message, sessionId, BinaryEventConverter.getConverter()); outputStream.write((byte) 0); outputStream.flush(); } catch (Exception e) { try { sendError(e, outputStream); } catch (IOException e1) { log.error("Error while sending response for login message: " + e1.getMessage(), e1); } } break; default: log.error("Message Type " + messageType + " is not supported!"); } return null; }
From source file:org.mule.util.queue.RandomAccessFileQueueStore.java
/**
 * Appends one record ([status byte][int payload length][payload]) to the queue
 * file and returns the file offset where the record starts.
 *
 * @param data the payload to persist
 * @return the file position of the record's first byte
 * @throws MuleRuntimeException wrapping any underlying {@link IOException}
 */
private long writeData(byte[] data) {
    try {
        if (getSize() > 0) {
            // Non-empty queue: position at the current logical end so the
            // record is appended rather than overwriting existing data.
            queueFileProvider.getRandomAccessFile().seek(fileTotalSpace);
        }
        long recordOffset = queueFileProvider.getRandomAccessFile().getFilePointer();
        int recordLength = CONTROL_DATA_SIZE + data.length;
        ByteBuffer record = ByteBuffer.allocate(recordLength);
        record.put(NOT_REMOVED);
        record.putInt(data.length);
        record.put(data);
        queueFileProvider.getRandomAccessFile().write(record.array());
        fileTotalSpace += recordLength;
        return recordOffset;
    } catch (IOException e) {
        throw new MuleRuntimeException(e);
    }
}
From source file:net.jenet.Connect.java
@Override
public void toBuffer(ByteBuffer buffer) {
    // Serializes this connect command into the wire buffer: the superclass
    // header first, then two shorts and seven ints in this exact order.
    // NOTE(review): field order defines the wire format — do not reorder
    // without updating the matching fromBuffer/decoder.
    super.toBuffer(buffer);
    buffer.putShort(outgoingPeerID);
    buffer.putShort(mtu);
    buffer.putInt(windowSize);
    buffer.putInt(channelCount);
    buffer.putInt(incomingBandwidth);
    buffer.putInt(outgoingBandwidth);
    buffer.putInt(packetThrottleInterval);
    buffer.putInt(packetThrottleAcceleration);
    buffer.putInt(packetThrottleDeceleration);
}
From source file:org.getspout.spoutapi.packet.PacketEntityInformation.java
/**
 * Packs entity identity data for transmission: 20 bytes per entity —
 * 16 for the UUID (least-significant half written first, preserving the
 * existing wire order) followed by the 4-byte entity id.
 *
 * @param entities the living entities to encode
 */
public PacketEntityInformation(List<LivingEntity> entities) {
    ByteBuffer packed = ByteBuffer.allocate(entities.size() * 20);
    for (Entity entity : entities) {
        packed.putLong(entity.getUniqueId().getLeastSignificantBits());
        packed.putLong(entity.getUniqueId().getMostSignificantBits());
        packed.putInt(entity.getEntityId());
    }
    data = packed.array();
}
From source file:com.codestation.henkakuserver.HenkakuServer.java
/** * Convert the exploit to a shellcode in binary format * * @param exploit payload compiled code/*from www.j ava 2 s . com*/ * @return the shellcode * @throws Exception */ private byte[] preprocessToBin(byte[] exploit) throws Exception { Pair<ArrayList<Integer>, List<Byte>> data = preprocessRop(exploit); int size = 4 + data.first.size() * 4 + data.second.size(); byte[] out = new byte[size + ((-size) & 3)]; ByteBuffer buf = ByteBuffer.wrap(out).order(ByteOrder.LITTLE_ENDIAN); buf.putInt(data.second.size()); for (Integer val : data.first) { buf.putInt(val); } for (Byte val : data.second) { buf.put(val); } return out; }
From source file:com.ottogroup.bi.spqr.pipeline.statistics.MicroPipelineStatistics.java
/** * Convert this {@link MicroPipelineStatistics} instance into its byte array representation * @return//from www . j a v a 2s . c o m */ public byte[] toByteArray() { ///////////////////////////////////////////////////////// // describes how the size of the result array is computed // SIZE_OF_INT + // SIZE_OF_LONG + // SIZE_OF_LONG + // SIZE_OF_INT + // SIZE_OF_INT + // SIZE_OF_INT + // SIZE_OF_INT + // SIZE_OF_INT + // SIZE_OF_INT + // SIZE_OF_INT + // procNodeId.length + // pid.length + // cid.length + // (SIZE_OF_INT * 3)); <-- add extra int's // for storing the field sizes of processingNodeId, pipelineId and componentId // which are required when extracting content from byte array // >> 11x SIZE_OF_INT // >> 3x SIZE_OF_LONG // // ByteBuffer buffer = ByteBuffer.allocate(11 * SIZE_OF_INT + 3 * SIZE_OF_LONG + procNodeId.length + pid.length + cid.length); // allocated buffer byte[] procNodeId = (this.processingNodeId != null ? this.processingNodeId.getBytes() : new byte[0]); byte[] pid = (this.pipelineId != null ? this.pipelineId.getBytes() : new byte[0]); byte[] cid = (this.componentId != null ? this.componentId.getBytes() : new byte[0]); ByteBuffer buffer = ByteBuffer .allocate(11 * SIZE_OF_INT + 2 * SIZE_OF_LONG + procNodeId.length + pid.length + cid.length); buffer.putInt(this.numOfMessages); buffer.putLong(this.startTime); buffer.putLong(this.endTime); buffer.putInt(this.minDuration); buffer.putInt(this.maxDuration); buffer.putInt(this.avgDuration); buffer.putInt(this.minSize); buffer.putInt(this.maxSize); buffer.putInt(this.avgSize); buffer.putInt(this.errors); buffer.putInt(procNodeId.length); buffer.put(procNodeId); buffer.putInt(pid.length); buffer.put(pid); buffer.putInt(cid.length); buffer.put(cid); return buffer.array(); }
From source file:org.apache.kylin.storage.hbase.cube.v1.filter.TestFuzzyRowFilterV2EndToEnd.java
@SuppressWarnings("deprecation") @Test/*from w ww . j a v a 2 s . co m*/ public void testFilterList() throws Exception { String cf = "f"; String table = "TestFuzzyRowFiltersInFilterList"; HTable ht = TEST_UTIL.createTable(TableName.valueOf(table), Bytes.toBytes(cf), Integer.MAX_VALUE); // 10 byte row key - (2 bytes 4 bytes 4 bytes) // 4 byte qualifier // 4 byte value for (int i1 = 0; i1 < 5; i1++) { for (int i2 = 0; i2 < 5; i2++) { byte[] rk = new byte[10]; ByteBuffer buf = ByteBuffer.wrap(rk); buf.clear(); buf.putShort((short) 2); buf.putInt(i1); buf.putInt(i2); // Each row contains 5 columns for (int c = 0; c < 5; c++) { byte[] cq = new byte[4]; Bytes.putBytes(cq, 0, Bytes.toBytes(c), 0, 4); Put p = new Put(rk); p.setDurability(Durability.SKIP_WAL); p.add(cf.getBytes(), cq, Bytes.toBytes(c)); ht.put(p); LOG.info("Inserting: rk: " + Bytes.toStringBinary(rk) + " cq: " + Bytes.toStringBinary(cq)); } } } TEST_UTIL.flush(); // test passes if we get back 5 KV's (1 row) runTest(ht, 5); }
From source file:org.apache.hadoop.hbase.filter.TestFuzzyRowFilterEndToEnd.java
@SuppressWarnings("deprecation") @Test/*from w w w .j a v a 2 s .c o m*/ public void testFilterList() throws Exception { String cf = "f"; String table = "TestFuzzyRowFiltersInFilterList"; Table ht = TEST_UTIL.createTable(TableName.valueOf(table), Bytes.toBytes(cf), Integer.MAX_VALUE); // 10 byte row key - (2 bytes 4 bytes 4 bytes) // 4 byte qualifier // 4 byte value for (int i1 = 0; i1 < 5; i1++) { for (int i2 = 0; i2 < 5; i2++) { byte[] rk = new byte[10]; ByteBuffer buf = ByteBuffer.wrap(rk); buf.clear(); buf.putShort((short) 2); buf.putInt(i1); buf.putInt(i2); // Each row contains 5 columns for (int c = 0; c < 5; c++) { byte[] cq = new byte[4]; Bytes.putBytes(cq, 0, Bytes.toBytes(c), 0, 4); Put p = new Put(rk); p.setDurability(Durability.SKIP_WAL); p.add(cf.getBytes(), cq, Bytes.toBytes(c)); ht.put(p); LOG.info("Inserting: rk: " + Bytes.toStringBinary(rk) + " cq: " + Bytes.toStringBinary(cq)); } } } TEST_UTIL.flush(); // test passes if we get back 5 KV's (1 row) runTest(ht, 5); }
From source file:edu.umn.cs.spatialHadoop.nasa.HDFRasterLayer.java
@Override public void write(DataOutput out) throws IOException { super.write(out); out.writeLong(timestamp);// w w w . j av a2s . co m ByteArrayOutputStream baos = new ByteArrayOutputStream(); GZIPOutputStream gzos = new GZIPOutputStream(baos); ByteBuffer bbuffer = ByteBuffer.allocate(getHeight() * 2 * 8 + 8); bbuffer.putInt(getWidth()); bbuffer.putInt(getHeight()); gzos.write(bbuffer.array(), 0, bbuffer.position()); for (int x = 0; x < getWidth(); x++) { bbuffer.clear(); for (int y = 0; y < getHeight(); y++) { bbuffer.putLong(sum[x][y]); bbuffer.putLong(count[x][y]); } gzos.write(bbuffer.array(), 0, bbuffer.position()); } gzos.close(); byte[] serializedData = baos.toByteArray(); out.writeInt(serializedData.length); out.write(serializedData); }