List of usage examples for java.nio ByteBuffer putInt
public abstract ByteBuffer putInt(int value);
From source file:org.wso2.carbon.databridge.receiver.binary.internal.BinaryDataReceiver.java
private void sendError(Exception e, OutputStream outputStream) throws IOException { int errorClassNameLength = e.getClass().getCanonicalName().length(); int errorMsgLength = e.getMessage().length(); ByteBuffer bbuf = ByteBuffer.wrap(new byte[8]); bbuf.putInt(errorClassNameLength); bbuf.putInt(errorMsgLength);/*from w w w . java 2 s. c om*/ outputStream.write((byte) 1);//Error outputStream.write(bbuf.array()); outputStream.write(e.getClass().getCanonicalName().getBytes(BinaryMessageConstants.DEFAULT_CHARSET)); outputStream.write(e.getMessage().getBytes(BinaryMessageConstants.DEFAULT_CHARSET)); outputStream.flush(); }
From source file:org.springframework.integration.support.json.EmbeddedJsonHeadersMessageMapper.java
private byte[] fromBytesPayload(byte[] payload, Map<String, Object> headersToEncode) throws Exception { byte[] headers = this.objectMapper.writeValueAsBytes(headersToEncode); ByteBuffer buffer = ByteBuffer.wrap(new byte[8 + headers.length + payload.length]); buffer.putInt(headers.length); buffer.put(headers);//from w w w .j a v a2 s.c o m buffer.putInt(payload.length); buffer.put(payload); return buffer.array(); }
From source file:org.apache.tez.runtime.library.output.TestOnFileUnorderedKVOutput.java
/**
 * Builds a spied OutputContext wired to a real LogicalIOProcessorRuntimeTask
 * (stored in the {@code task} field) for use by the output tests.
 * Also verifies that constructing the context registers counters and a
 * statistics entry for the destination vertex, and stubs
 * requestInitialMemory so memory requests are granted immediately.
 *
 * @param conf           job configuration, also carried as the user payload
 * @param sharedExecutor executor handed to the runtime task
 * @return a Mockito spy over the real output context
 * @throws IOException if the user payload cannot be created from conf
 */
private OutputContext createOutputContext(Configuration conf, TezSharedExecutor sharedExecutor)
        throws IOException {
    int appAttemptNumber = 1;
    TezUmbilical tezUmbilical = mock(TezUmbilical.class);
    String dagName = "currentDAG";
    String taskVertexName = "currentVertex";
    String destinationVertexName = "destinationVertex";
    // Build the full id chain: DAG -> vertex -> task -> attempt.
    TezDAGID dagID = TezDAGID.getInstance("2000", 1, 1);
    TezVertexID vertexID = TezVertexID.getInstance(dagID, 1);
    TezTaskID taskID = TezTaskID.getInstance(vertexID, 1);
    TezTaskAttemptID taskAttemptID = TezTaskAttemptID.getInstance(taskID, 1);
    UserPayload userPayload = TezUtils.createUserPayloadFromConf(conf);
    // Task spec with one mocked input and one mocked output.
    TaskSpec mockSpec = mock(TaskSpec.class);
    when(mockSpec.getInputs()).thenReturn(Collections.singletonList(mock(InputSpec.class)));
    when(mockSpec.getOutputs()).thenReturn(Collections.singletonList(mock(OutputSpec.class)));
    task = new LogicalIOProcessorRuntimeTask(mockSpec, appAttemptNumber, new Configuration(), new String[] { "/" },
            tezUmbilical, null, null, null, null, "", null, 1024, false, new DefaultHadoopShim(), sharedExecutor);
    LogicalIOProcessorRuntimeTask runtimeTask = spy(task);
    // Publish the shuffle port into the aux-service env, as the AM would:
    // 4-byte big-endian int, rewound so the helper reads from the start.
    Map<String, String> auxEnv = new HashMap<String, String>();
    ByteBuffer bb = ByteBuffer.allocate(4);
    bb.putInt(shufflePort);
    bb.position(0);
    AuxiliaryServiceHelper.setServiceDataIntoEnv(conf.get(TezConfiguration.TEZ_AM_SHUFFLE_AUXILIARY_SERVICE_ID,
            TezConfiguration.TEZ_AM_SHUFFLE_AUXILIARY_SERVICE_ID_DEFAULT), bb, auxEnv);
    OutputDescriptor outputDescriptor = mock(OutputDescriptor.class);
    when(outputDescriptor.getClassName()).thenReturn("OutputDescriptor");
    OutputContext realOutputContext = new TezOutputContextImpl(conf, new String[] { workDir.toString() },
            appAttemptNumber, tezUmbilical, dagName, taskVertexName, destinationVertexName, -1, taskAttemptID, 0,
            userPayload, runtimeTask, null, auxEnv, new MemoryDistributor(1, 1, conf), outputDescriptor, null,
            new ExecutionContextImpl("localhost"), 2048, new TezSharedExecutor(defaultConf));
    // Context construction must have touched counters/statistics exactly once.
    verify(runtimeTask, times(1)).addAndGetTezCounter(destinationVertexName);
    verify(runtimeTask, times(1)).getTaskStatistics();
    // verify output stats object got created
    Assert.assertTrue(task.getTaskStatistics().getIOStatistics().containsKey(destinationVertexName));
    OutputContext outputContext = spy(realOutputContext);
    // Grant any initial-memory request immediately with the full requested size.
    doAnswer(new Answer() {
        @Override
        public Object answer(InvocationOnMock invocation) throws Throwable {
            long requestedSize = (Long) invocation.getArguments()[0];
            MemoryUpdateCallbackHandler callback = (MemoryUpdateCallbackHandler) invocation.getArguments()[1];
            callback.memoryAssigned(requestedSize);
            return null;
        }
    }).when(outputContext).requestInitialMemory(anyLong(), any(MemoryUpdateCallback.class));
    return outputContext;
}
From source file:org.apache.hadoop.hbase.filter.TestFuzzyRowAndColumnRangeFilter.java
private void runTest(HTable hTable, int cqStart, int expectedSize) throws IOException { // [0, 2, ?, ?, ?, ?, 0, 0, 0, 1] byte[] fuzzyKey = new byte[10]; ByteBuffer buf = ByteBuffer.wrap(fuzzyKey); buf.clear();/*from w w w. j av a2 s . co m*/ buf.putShort((short) 2); for (int i = 0; i < 4; i++) buf.put((byte) 63); buf.putInt((short) 1); byte[] mask = new byte[] { 0, 0, 1, 1, 1, 1, 0, 0, 0, 0 }; Pair<byte[], byte[]> pair = new Pair<byte[], byte[]>(fuzzyKey, mask); FuzzyRowFilter fuzzyRowFilter = new FuzzyRowFilter(Lists.newArrayList(pair)); ColumnRangeFilter columnRangeFilter = new ColumnRangeFilter(Bytes.toBytes(cqStart), true, Bytes.toBytes(4), true); //regular test runScanner(hTable, expectedSize, fuzzyRowFilter, columnRangeFilter); //reverse filter order test runScanner(hTable, expectedSize, columnRangeFilter, fuzzyRowFilter); }
From source file:com.cellngine.crypto.RSACipher.java
/**
 * Length-prefix encodes an RSA modulus/exponent pair:
 * [4-byte modulus length][modulus bytes][4-byte exponent length][exponent bytes],
 * where each component is BigInteger's big-endian two's-complement form.
 *
 * @param modulus  the RSA modulus
 * @param exponent the RSA exponent
 * @return the concatenated, length-prefixed encoding
 */
private byte[] encode(final BigInteger modulus, final BigInteger exponent) {
    final byte[] modulusBytes = modulus.toByteArray();
    final byte[] exponentBytes = exponent.toByteArray();
    final int capacity = 8 + modulusBytes.length + exponentBytes.length;
    return ByteBuffer.allocate(capacity)
            .putInt(modulusBytes.length)
            .put(modulusBytes)
            .putInt(exponentBytes.length)
            .put(exponentBytes)
            .array();
}
From source file:com.linkedin.databus.core.DbusEventPart.java
/**
 * Serializes this event part into {@code buf}:
 * [4-byte data length][2-byte attributes][schema digest bytes][data bytes].
 * The attributes word packs the schema digest type in its low bits and the
 * schema version shifted left by VERSION_SHIFT. The internal _data buffer's
 * read position is saved and restored, so encoding does not consume it.
 *
 * @param buf destination buffer; caller must ensure sufficient remaining space
 * @throws UnsupportedOperationException if the digest type is neither MD5 nor CRC32
 */
public void encode(ByteBuffer buf) {
    // Remember _data's position so the bulk put below can be undone.
    int curPos = _data.position();
    buf.putInt(getDataLength());
    short attributes = 0;
    switch (_schemaDigestType) {
    case MD5:
        attributes = SCHEMA_DIGEST_TYPE_MD5;
        break;
    case CRC32:
        attributes = SCHEMA_DIGEST_TYPE_CRC32;
        break;
    default:
        throw new UnsupportedOperationException("Unsupported schema digest type:" + _schemaDigestType);
    }
    // Pack the schema version into the upper bits of the attributes word.
    attributes |= (_schemaVersion << VERSION_SHIFT);
    buf.putShort(attributes);
    buf.put(_schemaDigest);
    buf.put(_data); // advances _data's position to its limit...
    _data.position(curPos); // ...so restore it for subsequent readers
}
From source file:org.springframework.integration.ip.tcp.serializer.ByteArrayLengthHeaderSerializer.java
/** * Writes the header, according to the header format. * @param outputStream/*from w w w . j a va 2s . c o m*/ * @param length * @throws IOException */ protected void writeHeader(OutputStream outputStream, int length) throws IOException { ByteBuffer lengthPart = ByteBuffer.allocate(this.headerSize); switch (this.headerSize) { case HEADER_SIZE_INT: lengthPart.putInt(length); break; case HEADER_SIZE_UNSIGNED_BYTE: if (length > 0xff) { throw new IllegalArgumentException( "Length header:" + headerSize + " too short to accommodate message length:" + length); } lengthPart.put((byte) length); break; case HEADER_SIZE_UNSIGNED_SHORT: if (length > 0xffff) { throw new IllegalArgumentException( "Length header:" + headerSize + " too short to accommodate message length:" + length); } lengthPart.putShort((short) length); break; default: throw new IllegalArgumentException("Bad header size:" + headerSize); } outputStream.write(lengthPart.array()); }
From source file:ru.jts_dev.common.tcp.ProtocolByteArrayLengthHeaderSerializer.java
@Override protected void writeHeader(OutputStream outputStream, int length) throws IOException { ByteBuffer lengthPart = ByteBuffer.allocate(this.headerSize).order(LITTLE_ENDIAN); length += headerSize; // Protocol thing, length represent header size + data size switch (this.headerSize) { case HEADER_SIZE_INT: lengthPart.putInt(length); break;/*w ww. j a v a2 s. co m*/ case HEADER_SIZE_UNSIGNED_BYTE: if (length > 0xff) { throw new IllegalArgumentException( "Length header:" + headerSize + " too short to accommodate message length:" + length); } lengthPart.put((byte) length); break; case HEADER_SIZE_UNSIGNED_SHORT: if (length > 0xffff) { throw new IllegalArgumentException( "Length header:" + headerSize + " too short to accommodate message length:" + length); } lengthPart.putShort((short) length); break; default: throw new IllegalArgumentException("Bad header size:" + headerSize); } outputStream.write(lengthPart.array()); }
From source file:edu.umn.cs.spatialHadoop.visualization.FrequencyMap.java
/**
 * Serializes this frequency map: width and height as two 4-byte ints
 * followed by the frequencies column by column, all GZIP-compressed; the
 * compressed blob is then written length-prefixed to {@code out}.
 *
 * Fix: the GZIPOutputStream is now closed via try-with-resources, so it
 * cannot leak (or leave the deflate stream unfinished) if a write throws.
 *
 * @param out destination stream
 * @throws IOException if compression or the final write fails
 */
@Override
public void write(DataOutput out) throws IOException {
    super.write(out);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    // Scratch buffer sized for one column of 4-byte floats; the +8 also
    // makes it big enough for the width/height header written first.
    ByteBuffer bbuffer = ByteBuffer.allocate(getHeight() * 4 + 8);
    try (GZIPOutputStream gzos = new GZIPOutputStream(baos)) {
        bbuffer.putInt(getWidth());
        bbuffer.putInt(getHeight());
        gzos.write(bbuffer.array(), 0, bbuffer.position());
        for (int x = 0; x < getWidth(); x++) {
            bbuffer.clear();
            for (int y = 0; y < getHeight(); y++) {
                bbuffer.putFloat(frequencies[x][y]);
            }
            gzos.write(bbuffer.array(), 0, bbuffer.position());
        }
    } // close() finishes the GZIP stream so baos holds a complete blob
    byte[] serializedData = baos.toByteArray();
    out.writeInt(serializedData.length);
    out.write(serializedData);
}
From source file:edu.berkeley.sparrow.examples.FairnessTestingFrontend.java
public List<TTaskSpec> generateJob(int numTasks, int numPreferredNodes, List<String> backends, int benchmarkId, int benchmarkIterations) { // Pack task parameters ByteBuffer message = ByteBuffer.allocate(8); message.putInt(benchmarkId); message.putInt(benchmarkIterations); List<TTaskSpec> out = new ArrayList<TTaskSpec>(); for (int taskId = 0; taskId < numTasks; taskId++) { TTaskSpec spec = new TTaskSpec(); spec.setTaskId(Integer.toString((new Random().nextInt()))); spec.setMessage(message.array()); if (numPreferredNodes > 0) { Collections.shuffle(backends); TPlacementPreference preference = new TPlacementPreference(); for (int i = 0; i < numPreferredNodes; i++) { preference.addToNodes(backends.get(i)); }/*from w w w . java 2s .c o m*/ spec.setPreference(preference); } out.add(spec); } return out; }