List of usage examples for java.io DataOutputStream writeInt
public final void writeInt(int v) throws IOException
Writes an int to the underlying output stream as four bytes, high byte first.

From source file:it.jnrpe.net.JNRPEProtocolPacket.java
/**
 * Converts the packet object to its byte array representation.
 *
 * @return The byte array representation of this packet.
 */
public byte[] toByteArray() {
    ByteArrayOutputStream bout = new ByteArrayOutputStream();
    DataOutputStream dout = new DataOutputStream(bout);
    try {
        dout.writeShort(packetVersion);
        dout.writeShort(packetTypeCode);
        dout.writeInt(crcValue);
        dout.writeShort(resultCode);
        dout.write(byteBufferAry);
        dout.write(dummyBytesAry);
        dout.close();
    } catch (IOException e) {
        // Never happens...
        throw new IllegalStateException(e.getMessage(), e);
    }
    return bout.toByteArray();
}
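The "four bytes, high byte first" (big-endian) behavior of writeInt can be verified in isolation. A minimal, self-contained sketch (not part of the JNRPE source):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteIntDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        DataOutputStream dout = new DataOutputStream(bout);

        // 0x12345678 is written high byte first: 0x12, 0x34, 0x56, 0x78.
        dout.writeInt(0x12345678);

        for (byte b : bout.toByteArray()) {
            System.out.printf("%02X ", b & 0xFF); // prints: 12 34 56 78
        }
    }
}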
From source file:org.apache.hadoop.hbase.io.hfile.TestChecksum.java
protected void testChecksumInternals(boolean useTags) throws IOException {
    Compression.Algorithm algo = NONE;
    for (boolean pread : new boolean[] { false, true }) {
        for (int bytesPerChecksum : BYTES_PER_CHECKSUM) {
            Path path = new Path(TEST_UTIL.getDataTestDir(), "checksumChunk_" + algo + bytesPerChecksum);
            FSDataOutputStream os = fs.create(path);
            HFileContext meta = new HFileContextBuilder().withCompression(algo).withIncludesMvcc(true)
                    .withIncludesTags(useTags).withHBaseCheckSum(true).withBytesPerCheckSum(bytesPerChecksum)
                    .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE).build();
            HFileBlock.Writer hbw = new HFileBlock.Writer(null, meta);

            // Write one block. The block has data that is at least
            // 6 times more than the checksum chunk size.
            long dataSize = 0;
            DataOutputStream dos = hbw.startWriting(BlockType.DATA);
            for (; dataSize < 6 * bytesPerChecksum;) {
                for (int i = 0; i < 1234; ++i) {
                    dos.writeInt(i);
                    dataSize += 4;
                }
            }
            hbw.writeHeaderAndData(os);
            long totalSize = hbw.getOnDiskSizeWithHeader();
            os.close();

            long expectedChunks = ChecksumUtil.numChunks(dataSize + HConstants.HFILEBLOCK_HEADER_SIZE,
                    bytesPerChecksum);
            LOG.info("testChecksumChunks: pread=" + pread + ", bytesPerChecksum=" + bytesPerChecksum
                    + ", fileSize=" + totalSize + ", dataSize=" + dataSize + ", expectedChunks="
                    + expectedChunks);

            // Verify hbase checksums.
            assertEquals(true, hfs.useHBaseChecksum());

            // Read data back from file.
            FSDataInputStream is = fs.open(path);
            FSDataInputStream nochecksum = hfs.getNoChecksumFs().open(path);
            meta = new HFileContextBuilder().withCompression(algo).withIncludesMvcc(true)
                    .withIncludesTags(useTags).withHBaseCheckSum(true).withBytesPerCheckSum(bytesPerChecksum)
                    .build();
            HFileBlock.FSReader hbr = new HFileBlock.FSReaderV2(new FSDataInputStreamWrapper(is, nochecksum),
                    totalSize, hfs, path, meta);
            HFileBlock b = hbr.readBlockData(0, -1, -1, pread);
            is.close();
            b.sanityCheck();
            assertEquals(dataSize, b.getUncompressedSizeWithoutHeader());

            // Verify that we have the expected number of checksum chunks.
            assertEquals(totalSize, HConstants.HFILEBLOCK_HEADER_SIZE + dataSize
                    + expectedChunks * HFileBlock.CHECKSUM_SIZE);

            // Assert that we did not encounter hbase checksum verification failures.
            assertEquals(0, HFile.getChecksumFailuresCount());
        }
    }
}
From source file:org.apache.hadoop.hive.ql.exec.persistence.RowContainer1.java
private byte[] serialize(Row[] obj) throws HiveException {
    assert (serde != null && standardOI != null);
    ByteArrayOutputStream baos;
    DataOutputStream oos;
    try {
        baos = new ByteArrayOutputStream();
        oos = new DataOutputStream(baos);
        oos.writeInt(obj.length);
        if (serde != null && standardOI != null) {
            for (int i = 0; i < obj.length; ++i) {
                Writable outVal = serde.serialize(obj[i], standardOI);
                outVal.write(oos);
            }
        }
        oos.close();
    } catch (Exception e) {
        e.printStackTrace();
        throw new HiveException(e);
    }
    return baos.toByteArray();
}
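Here writeInt(obj.length) serves as a length prefix: a reader first recovers the row count, then deserializes that many rows. The Hive counterpart is not shown on this page, but the generic shape of such a reader looks like the following sketch, where readRow is a hypothetical per-record reader mirroring outVal.write(oos):

// Generic counterpart sketch (not Hive's actual code): read the length
// prefix, then that many records.
private Row[] deserialize(byte[] bytes) throws IOException {
    DataInputStream dis = new DataInputStream(new ByteArrayInputStream(bytes));
    int count = dis.readInt(); // matches oos.writeInt(obj.length)
    Row[] rows = new Row[count];
    for (int i = 0; i < count; i++) {
        rows[i] = readRow(dis); // hypothetical per-record reader
    }
    return rows;
}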
From source file:it.jnrpe.net.JNRPEProtocolPacket.java
/**
 * Validates the packet CRC.
 *
 * @throws BadCRCException
 *             If the CRC can't be validated
 */
public void validate() throws BadCRCException {
    ByteArrayOutputStream bout = new ByteArrayOutputStream();
    DataOutputStream dout = new DataOutputStream(bout);
    try {
        dout.writeShort(packetVersion);
        dout.writeShort(packetTypeCode);
        dout.writeInt(0); // NO CRC
        dout.writeShort(resultCode);
        dout.write(byteBufferAry);
        dout.write(dummyBytesAry);
        dout.close();

        byte[] vBytes = bout.toByteArray();
        CRC32 crcAlg = new CRC32();
        crcAlg.update(vBytes);

        if (!(((int) crcAlg.getValue()) == crcValue)) {
            throw new BadCRCException("Bad CRC");
        }
    } catch (IOException e) {
        // Never happens...
        throw new IllegalStateException(e.getMessage(), e);
    }
}
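Validation works because the sender computes the CRC the same way: serialize the packet with a zero placeholder in the CRC slot, run CRC32 over those bytes, and store the result in crcValue. A sketch of that write-side step (the method name updateCRC is assumed, not taken from the JNRPE source):

// Assumed write-side counterpart: compute CRC32 over the packet bytes
// serialized with a zeroed CRC field, then store it in crcValue.
private void updateCRC() {
    ByteArrayOutputStream bout = new ByteArrayOutputStream();
    DataOutputStream dout = new DataOutputStream(bout);
    try {
        dout.writeShort(packetVersion);
        dout.writeShort(packetTypeCode);
        dout.writeInt(0); // placeholder, same as in validate()
        dout.writeShort(resultCode);
        dout.write(byteBufferAry);
        dout.write(dummyBytesAry);
        dout.close();

        CRC32 crcAlg = new CRC32();
        crcAlg.update(bout.toByteArray());
        crcValue = (int) crcAlg.getValue();
    } catch (IOException e) {
        throw new IllegalStateException(e.getMessage(), e);
    }
}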
From source file:org.dasein.cloud.test.identity.IdentityResources.java
/**
 * @link http://stackoverflow.com/a/14582408/211197
 * @return Encoded generated public key
 */
private @Nullable String generateKey() {
    KeyPairGenerator generator;
    try {
        generator = KeyPairGenerator.getInstance("RSA");
        generator.initialize(2048);
        KeyPair keyPair = generator.genKeyPair();
        RSAPublicKey rsaPublicKey = (RSAPublicKey) keyPair.getPublic();
        ByteArrayOutputStream byteOs = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(byteOs);
        dos.writeInt("ssh-rsa".getBytes().length);
        dos.write("ssh-rsa".getBytes());
        dos.writeInt(rsaPublicKey.getPublicExponent().toByteArray().length);
        dos.write(rsaPublicKey.getPublicExponent().toByteArray());
        dos.writeInt(rsaPublicKey.getModulus().toByteArray().length);
        dos.write(rsaPublicKey.getModulus().toByteArray());
        String publicKeyEncoded = new String(Base64.encodeBase64(byteOs.toByteArray()));
        return "ssh-rsa " + publicKeyEncoded + " dasein";
    } catch (Throwable e) {
        return null;
    }
}
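In this example writeInt produces the length prefixes required by the SSH public key wire format (RFC 4253 "string" encoding: a four-byte big-endian length followed by the raw bytes). The repeated prefix-then-payload pairs can be factored into a helper; writeSshString below is a hypothetical name, not part of the Dasein source:

// Hypothetical helper for the RFC 4253 "string" encoding used above:
// a 4-byte big-endian length (writeInt) followed by the raw bytes.
private static void writeSshString(DataOutputStream dos, byte[] data) throws IOException {
    dos.writeInt(data.length);
    dos.write(data);
}

// Usage, equivalent to the three prefix/payload pairs in generateKey():
// writeSshString(dos, "ssh-rsa".getBytes());
// writeSshString(dos, rsaPublicKey.getPublicExponent().toByteArray());
// writeSshString(dos, rsaPublicKey.getModulus().toByteArray());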
From source file:com.atilika.kuromoji.trie.DoubleArrayTrie.java
public void write(OutputStream output) throws IOException {
    baseBuffer.rewind();
    checkBuffer.rewind();
    tailBuffer.rewind();

    int baseCheckSize = Math.min(maxBaseCheckIndex + 64, baseBuffer.capacity());
    int tailSize = Math.min(tailIndex - TAIL_OFFSET + 64, tailBuffer.capacity());

    DataOutputStream dataOutput = new DataOutputStream(new BufferedOutputStream(output));

    dataOutput.writeBoolean(compact);
    dataOutput.writeInt(baseCheckSize);
    dataOutput.writeInt(tailSize);

    WritableByteChannel channel = Channels.newChannel(dataOutput);

    ByteBuffer tmpBuffer = ByteBuffer.allocate(baseCheckSize * 4);
    IntBuffer tmpIntBuffer = tmpBuffer.asIntBuffer();
    tmpIntBuffer.put(baseBuffer.array(), 0, baseCheckSize);
    tmpBuffer.rewind();
    channel.write(tmpBuffer);

    tmpBuffer = ByteBuffer.allocate(baseCheckSize * 4);
    tmpIntBuffer = tmpBuffer.asIntBuffer();
    tmpIntBuffer.put(checkBuffer.array(), 0, baseCheckSize);
    tmpBuffer.rewind();
    channel.write(tmpBuffer);

    tmpBuffer = ByteBuffer.allocate(tailSize * 2);
    CharBuffer tmpCharBuffer = tmpBuffer.asCharBuffer();
    tmpCharBuffer.put(tailBuffer.array(), 0, tailSize);
    tmpBuffer.rewind();
    channel.write(tmpBuffer);

    dataOutput.flush();
}
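On the read side, the same header must be consumed in the same order with the matching DataInputStream calls: readBoolean for the compact flag, then two readInt calls for the sizes. A minimal sketch of that counterpart (assumed shape, not Kuromoji's actual read method):

// Counterpart sketch: the header is read back in exactly the order it
// was written, after which the raw buffer contents follow.
public void read(InputStream input) throws IOException {
    DataInputStream dataInput = new DataInputStream(new BufferedInputStream(input));
    boolean compact = dataInput.readBoolean(); // matches writeBoolean(compact)
    int baseCheckSize = dataInput.readInt();   // matches writeInt(baseCheckSize)
    int tailSize = dataInput.readInt();        // matches writeInt(tailSize)
    // ...then baseCheckSize * 4 bytes of base data, baseCheckSize * 4 bytes
    // of check data, and tailSize * 2 bytes of tail characters follow.
}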
From source file:org.slc.sli.dal.encrypt.AesCipher.java
@Override
public String encrypt(Object data) {
    if (data instanceof String) {
        return "ESTRING:" + encryptFromBytes(StringUtils.getBytesUtf8((String) data));
    } else {
        ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(byteOutputStream);
        String type;
        try {
            if (data instanceof Boolean) {
                dos.writeBoolean((Boolean) data);
                type = "EBOOL:";
            } else if (data instanceof Integer) {
                dos.writeInt((Integer) data);
                type = "EINT:";
            } else if (data instanceof Long) {
                dos.writeLong((Long) data);
                type = "ELONG:";
            } else if (data instanceof Double) {
                dos.writeDouble((Double) data);
                type = "EDOUBLE:";
            } else {
                throw new RuntimeException("Unsupported type: " + data.getClass().getCanonicalName());
            }
            dos.flush();
            dos.close();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        byte[] bytes = byteOutputStream.toByteArray();
        return type + encryptFromBytes(bytes);
    }
}
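Because each value is tagged with its type before encryption, the decrypt path can branch on the prefix and use the matching DataInputStream read call. A sketch of that reverse path; decrypt and decryptToBytes are hypothetical names, not taken from the SLC source:

// Assumed decrypt-side sketch: the type prefix selects the matching
// DataInputStream read call.
public Object decrypt(String prefixed) throws IOException {
    int colon = prefixed.indexOf(':');
    String type = prefixed.substring(0, colon + 1);
    byte[] plain = decryptToBytes(prefixed.substring(colon + 1)); // hypothetical
    DataInputStream dis = new DataInputStream(new ByteArrayInputStream(plain));
    if ("EINT:".equals(type)) {
        return dis.readInt();      // mirrors writeInt
    } else if ("ELONG:".equals(type)) {
        return dis.readLong();     // mirrors writeLong
    } else if ("EDOUBLE:".equals(type)) {
        return dis.readDouble();   // mirrors writeDouble
    } else if ("EBOOL:".equals(type)) {
        return dis.readBoolean();  // mirrors writeBoolean
    }
    throw new RuntimeException("Unsupported type prefix: " + type);
}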
From source file:org.hyperic.hq.measurement.agent.server.MeasurementSchedule.java
private void writeSRNs() throws AgentStorageException {
    ByteArrayOutputStream bOs;
    DataOutputStream dOs;

    bOs = new ByteArrayOutputStream();
    dOs = new DataOutputStream(bOs);
    synchronized (srnList) {
        try {
            dOs.writeInt(srnList.size());
            for (SRN srn : srnList) {
                AppdefEntityID ent = srn.getEntity();
                dOs.writeInt(ent.getType());
                dOs.writeInt(ent.getID());
                dOs.writeInt(srn.getRevisionNumber());
            }
            List<Byte> bytes = Arrays.asList(ArrayUtils.toObject(bOs.toByteArray()));
            int size = bytes.size();
            if (size > MAX_ELEM_SIZE) {
                store.setValue(PROP_MSRNS_LENGTH, new Integer((size / MAX_ELEM_SIZE) + 1).toString());
                int ii = 0;
                for (int i = 0; i < size; i += MAX_ELEM_SIZE) {
                    int start = i;
                    int max = Math.min(i + MAX_ELEM_SIZE, size);
                    List<Byte> subList = bytes.subList(start, max);
                    Byte[] b = subList.toArray(new Byte[0]);
                    store.setValue(MeasurementSchedule.PROP_MSRNS + "_" + ii++,
                            Base64.encode(ArrayUtils.toPrimitive(b)));
                }
            } else {
                store.setValue(PROP_MSRNS_LENGTH, "1");
                Byte[] b = bytes.toArray(new Byte[0]);
                store.setValue(MeasurementSchedule.PROP_MSRNS + "_0",
                        Base64.encode(ArrayUtils.toPrimitive(b)));
            }
        } catch (IOException exc) {
            this.log.error("Error encoding SRN list", exc);
            return;
        }
    }
}
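The stored format is a count followed by three ints per SRN (entity type, entity ID, revision number), so a reader consumes it symmetrically. A generic sketch of that decode step (assumed shape, not HQ's actual read method):

// Counterpart sketch: decode the SRN records once the Base64 chunks
// have been reassembled into `decoded`.
private void readSRNs(byte[] decoded) throws IOException {
    DataInputStream dIs = new DataInputStream(new ByteArrayInputStream(decoded));
    int numSrns = dIs.readInt();        // matches writeInt(srnList.size())
    for (int i = 0; i < numSrns; i++) {
        int entityType = dIs.readInt(); // matches writeInt(ent.getType())
        int entityId = dIs.readInt();   // matches writeInt(ent.getID())
        int revision = dIs.readInt();   // matches writeInt(srn.getRevisionNumber())
        // reconstruct the SRN from these three values...
    }
}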
From source file:org.wso2.caching.digest.DOMHASHGenerator.java
/**
 * This is an overloaded method for the digest generation for OMDocument
 *
 * @param document        - OMDocument to be subjected to the key generation
 * @param digestAlgorithm - digest algorithm as a String
 * @return byte[] representing the calculated digest over the provided document
 * @throws CachingException if there is an io error or the specified algorithm is incorrect
 */
public byte[] getDigest(OMDocument document, String digestAlgorithm) throws CachingException {
    byte[] digest = new byte[0];
    try {
        MessageDigest md = MessageDigest.getInstance(digestAlgorithm);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos);
        dos.writeInt(9);
        Collection childNodes = getValidElements(document);
        dos.writeInt(childNodes.size());
        for (Iterator itr = childNodes.iterator(); itr.hasNext();) {
            OMNode node = (OMNode) itr.next();
            if (node.getType() == OMNode.PI_NODE)
                dos.write(getDigest((OMProcessingInstruction) node, digestAlgorithm));
            else if (node.getType() == OMNode.ELEMENT_NODE)
                dos.write(getDigest((OMElement) node, digestAlgorithm));
        }
        dos.close();
        md.update(baos.toByteArray());
        digest = md.digest();
    } catch (NoSuchAlgorithmException e) {
        handleException("Can not locate the algorithm "
                + "provided for the digest generation : " + digestAlgorithm, e);
    } catch (IOException e) {
        handleException("Error in calculating the "
                + "digest value for the OMDocument : " + document, e);
    }
    return digest;
}
From source file:org.apache.cassandra.db.SuperColumn.java
public void serialize(IColumn column, DataOutputStream dos) throws IOException {
    SuperColumn superColumn = (SuperColumn) column;
    dos.writeUTF(superColumn.name());
    dos.writeInt(superColumn.getLocalDeletionTime());
    dos.writeLong(superColumn.getMarkedForDeleteAt());

    Collection<IColumn> columns = column.getSubColumns();
    int size = columns.size();
    dos.writeInt(size);
    dos.writeInt(superColumn.getSizeOfAllColumns());
    for (IColumn subColumn : columns) {
        Column.serializer().serialize(subColumn, dos);
    }
}
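As with the other examples, the reader must consume the fields in exactly the order they were written: readUTF, readInt, readLong, then the two int counts before the subcolumns. A generic sketch of that header decode (assumed shape, not Cassandra's actual deserializer):

// Counterpart sketch: fields come back in the order they were written.
public void deserializeHeader(DataInputStream dis) throws IOException {
    String name = dis.readUTF();             // matches writeUTF(name)
    int localDeletionTime = dis.readInt();   // matches writeInt(getLocalDeletionTime())
    long markedForDeleteAt = dis.readLong(); // matches writeLong(getMarkedForDeleteAt())
    int subColumnCount = dis.readInt();      // matches writeInt(size)
    int sizeOfAllColumns = dis.readInt();    // matches writeInt(getSizeOfAllColumns())
    // each of the subColumnCount subcolumns would then be deserialized in turn
}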