List of usage examples for java.io DataOutput writeByte
void writeByte(int v) throws IOException;
Parameter: v - the byte value to be written; only the eight low-order bits of v are written, the 24 high-order bits are ignored.
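Before the examples taken from real projects, here is a minimal, self-contained sketch of writeByte with DataOutputStream (the most common DataOutput implementation), writing into memory and reading the bytes back with the matching DataInput methods:

import java.io.*;

public class WriteByteExample {
    public static void main(String[] args) throws IOException {
        // Write a few bytes; only the low 8 bits of each int argument are kept.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buffer)) {
            out.writeByte(0x41);  // 'A'
            out.writeByte(200);   // stored as the signed byte -56 (0xC8)
            out.writeByte(0x1FF); // high-order bits ignored, stored as 0xFF
        }

        // Read them back.
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
            System.out.println(in.readByte());         // 65
            System.out.println(in.readUnsignedByte()); // 200
            System.out.println(in.readUnsignedByte()); // 255
        }
    }
}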
From source file:org.apache.hadoop.hbase.hbql.filter.RecordFilterList.java
public void write(final DataOutput out) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    out.writeByte(operator.ordinal());
    out.writeInt(filters.size());
    for (Filter filter : filters) {
        HbaseObjectWritable.writeObject(out, filter, Writable.class, conf);
    }
}
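The layout used above (one byte for an enum discriminator, then an element count, then the elements) is easy to reproduce without the HBase classes. A small sketch of that pattern; the Operator enum and the string payload are hypothetical stand-ins, not part of the HBase API:

import java.io.*;
import java.util.*;

class EnumPlusListExample {
    enum Operator { MUST_PASS_ALL, MUST_PASS_ONE } // hypothetical stand-in

    static void write(DataOutput out, Operator op, List<String> items) throws IOException {
        out.writeByte(op.ordinal()); // single-byte discriminator
        out.writeInt(items.size());  // element count
        for (String item : items) {
            out.writeUTF(item);
        }
    }

    static void read(DataInput in) throws IOException {
        Operator op = Operator.values()[in.readUnsignedByte()];
        int n = in.readInt();
        List<String> items = new ArrayList<>();
        for (int i = 0; i < n; i++) {
            items.add(in.readUTF());
        }
        System.out.println(op + " " + items);
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        write(new DataOutputStream(buf), Operator.MUST_PASS_ONE, Arrays.asList("a", "b"));
        read(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
    }
}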
From source file:org.apache.hadoop.hbase.HRegionInfo.java
/**
 * @deprecated Use protobuf serialization instead. See {@link #toByteArray()} and
 * {@link #toDelimitedByteArray()}
 */
@Deprecated
public void write(DataOutput out) throws IOException {
    out.writeByte(getVersion());
    Bytes.writeByteArray(out, endKey);
    out.writeBoolean(offLine);
    out.writeLong(regionId);
    Bytes.writeByteArray(out, regionName);
    out.writeBoolean(split);
    Bytes.writeByteArray(out, startKey);
    Bytes.writeByteArray(out, tableName.getName());
    out.writeInt(hashCode);
}
From source file:org.apache.hadoop.hbase.io.HbaseObjectWritable.java
/**
 * Write a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding.
 * @param out
 * @param instance
 * @param declaredClass
 * @param conf
 * @throws IOException
 */
@SuppressWarnings("unchecked")
public static void writeObject(DataOutput out, Object instance, Class declaredClass, Configuration conf)
        throws IOException {
    Object instanceObj = instance;
    Class declClass = declaredClass;

    if (instanceObj == null) { // null
        instanceObj = new NullInstance(declClass, conf);
        declClass = Writable.class;
    }
    writeClassCode(out, declClass);
    if (declClass.isArray()) { // array
        // If bytearray, just dump it out -- avoid the recursion and
        // byte-at-a-time we were previously doing.
        if (declClass.equals(byte[].class)) {
            Bytes.writeByteArray(out, (byte[]) instanceObj);
        } else if (declClass.equals(Result[].class)) {
            Result.writeArray(out, (Result[]) instanceObj);
        } else {
            // if it is a Generic array, write the element's type
            if (getClassCode(declaredClass) == GENERIC_ARRAY_CODE) {
                Class<?> componentType = declaredClass.getComponentType();
                writeClass(out, componentType);
            }
            int length = Array.getLength(instanceObj);
            out.writeInt(length);
            for (int i = 0; i < length; i++) {
                Object item = Array.get(instanceObj, i);
                writeObject(out, item, item.getClass(), conf);
            }
        }
    } else if (List.class.isAssignableFrom(declClass)) {
        List list = (List) instanceObj;
        int length = list.size();
        out.writeInt(length);
        for (int i = 0; i < length; i++) {
            Object elem = list.get(i);
            writeObject(out, elem, elem == null ? Writable.class : elem.getClass(), conf);
        }
    } else if (declClass == String.class) { // String
        Text.writeString(out, (String) instanceObj);
    } else if (declClass.isPrimitive()) { // primitive type
        if (declClass == Boolean.TYPE) { // boolean
            out.writeBoolean(((Boolean) instanceObj).booleanValue());
        } else if (declClass == Character.TYPE) { // char
            out.writeChar(((Character) instanceObj).charValue());
        } else if (declClass == Byte.TYPE) { // byte
            out.writeByte(((Byte) instanceObj).byteValue());
        } else if (declClass == Short.TYPE) { // short
            out.writeShort(((Short) instanceObj).shortValue());
        } else if (declClass == Integer.TYPE) { // int
            out.writeInt(((Integer) instanceObj).intValue());
        } else if (declClass == Long.TYPE) { // long
            out.writeLong(((Long) instanceObj).longValue());
        } else if (declClass == Float.TYPE) { // float
            out.writeFloat(((Float) instanceObj).floatValue());
        } else if (declClass == Double.TYPE) { // double
            out.writeDouble(((Double) instanceObj).doubleValue());
        } else if (declClass == Void.TYPE) { // void
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declClass);
        }
    } else if (declClass.isEnum()) { // enum
        Text.writeString(out, ((Enum) instanceObj).name());
    } else if (Message.class.isAssignableFrom(declaredClass)) {
        Text.writeString(out, instanceObj.getClass().getName());
        ((Message) instance).writeDelimitedTo(DataOutputOutputStream.constructOutputStream(out));
    } else if (Writable.class.isAssignableFrom(declClass)) { // Writable
        Class<?> c = instanceObj.getClass();
        Integer code = CLASS_TO_CODE.get(c);
        if (code == null) {
            out.writeByte(NOT_ENCODED);
            Text.writeString(out, c.getName());
        } else {
            writeClassCode(out, c);
        }
        ((Writable) instanceObj).write(out);
    } else if (Serializable.class.isAssignableFrom(declClass)) {
        Class<?> c = instanceObj.getClass();
        Integer code = CLASS_TO_CODE.get(c);
        if (code == null) {
            out.writeByte(NOT_ENCODED);
            Text.writeString(out, c.getName());
        } else {
            writeClassCode(out, c);
        }
        ByteArrayOutputStream bos = null;
        ObjectOutputStream oos = null;
        try {
            bos = new ByteArrayOutputStream();
            oos = new ObjectOutputStream(bos);
            oos.writeObject(instanceObj);
            byte[] value = bos.toByteArray();
            out.writeInt(value.length);
            out.write(value);
        } finally {
            if (bos != null) bos.close();
            if (oos != null) oos.close();
        }
    } else {
        throw new IOException("Can't write: " + instanceObj + " as " + declClass);
    }
}
From source file:org.apache.hadoop.hbase.security.access.HbaseObjectWritableFor96Migration.java
/**
 * Write a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding.
 * @param out
 * @param instance
 * @param declaredClass
 * @param conf
 * @throws IOException
 */
@SuppressWarnings("unchecked")
static void writeObject(DataOutput out, Object instance, Class declaredClass, Configuration conf)
        throws IOException {
    Object instanceObj = instance;
    Class declClass = declaredClass;

    if (instanceObj == null) { // null
        instanceObj = new NullInstance(declClass, conf);
        declClass = Writable.class;
    }
    writeClassCode(out, declClass);
    if (declClass.isArray()) { // array
        // If bytearray, just dump it out -- avoid the recursion and
        // byte-at-a-time we were previously doing.
        if (declClass.equals(byte[].class)) {
            Bytes.writeByteArray(out, (byte[]) instanceObj);
        } else {
            // if it is a Generic array, write the element's type
            if (getClassCode(declaredClass) == GENERIC_ARRAY_CODE) {
                Class<?> componentType = declaredClass.getComponentType();
                writeClass(out, componentType);
            }
            int length = Array.getLength(instanceObj);
            out.writeInt(length);
            for (int i = 0; i < length; i++) {
                Object item = Array.get(instanceObj, i);
                writeObject(out, item, item.getClass(), conf);
            }
        }
    } else if (List.class.isAssignableFrom(declClass)) {
        List list = (List) instanceObj;
        int length = list.size();
        out.writeInt(length);
        for (int i = 0; i < length; i++) {
            Object elem = list.get(i);
            writeObject(out, elem, elem == null ? Writable.class : elem.getClass(), conf);
        }
    } else if (declClass == String.class) { // String
        Text.writeString(out, (String) instanceObj);
    } else if (declClass.isPrimitive()) { // primitive type
        if (declClass == Boolean.TYPE) { // boolean
            out.writeBoolean(((Boolean) instanceObj).booleanValue());
        } else if (declClass == Character.TYPE) { // char
            out.writeChar(((Character) instanceObj).charValue());
        } else if (declClass == Byte.TYPE) { // byte
            out.writeByte(((Byte) instanceObj).byteValue());
        } else if (declClass == Short.TYPE) { // short
            out.writeShort(((Short) instanceObj).shortValue());
        } else if (declClass == Integer.TYPE) { // int
            out.writeInt(((Integer) instanceObj).intValue());
        } else if (declClass == Long.TYPE) { // long
            out.writeLong(((Long) instanceObj).longValue());
        } else if (declClass == Float.TYPE) { // float
            out.writeFloat(((Float) instanceObj).floatValue());
        } else if (declClass == Double.TYPE) { // double
            out.writeDouble(((Double) instanceObj).doubleValue());
        } else if (declClass == Void.TYPE) { // void
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declClass);
        }
    } else if (declClass.isEnum()) { // enum
        Text.writeString(out, ((Enum) instanceObj).name());
    } else if (Message.class.isAssignableFrom(declaredClass)) {
        Text.writeString(out, instanceObj.getClass().getName());
        ((Message) instance).writeDelimitedTo(DataOutputOutputStream.constructOutputStream(out));
    } else if (Writable.class.isAssignableFrom(declClass)) { // Writable
        Class<?> c = instanceObj.getClass();
        Integer code = CLASS_TO_CODE.get(c);
        if (code == null) {
            out.writeByte(NOT_ENCODED);
            Text.writeString(out, c.getName());
        } else {
            writeClassCode(out, c);
        }
        ((Writable) instanceObj).write(out);
    } else if (Serializable.class.isAssignableFrom(declClass)) {
        Class<?> c = instanceObj.getClass();
        Integer code = CLASS_TO_CODE.get(c);
        if (code == null) {
            out.writeByte(NOT_ENCODED);
            Text.writeString(out, c.getName());
        } else {
            writeClassCode(out, c);
        }
        ByteArrayOutputStream bos = null;
        ObjectOutputStream oos = null;
        try {
            bos = new ByteArrayOutputStream();
            oos = new ObjectOutputStream(bos);
            oos.writeObject(instanceObj);
            byte[] value = bos.toByteArray();
            out.writeInt(value.length);
            out.write(value);
        } finally {
            if (bos != null) bos.close();
            if (oos != null) oos.close();
        }
    } else if (Scan.class.isAssignableFrom(declClass)) {
        Scan scan = (Scan) instanceObj;
        byte[] scanBytes = ProtobufUtil.toScan(scan).toByteArray();
        out.writeInt(scanBytes.length);
        out.write(scanBytes);
    } else {
        throw new IOException("Can't write: " + instanceObj + " as " + declClass);
    }
}
From source file:org.apache.hadoop.hbase.security.access.Permission.java
@Override
public void write(DataOutput out) throws IOException {
    super.write(out);
    out.writeByte(actions != null ? actions.length : 0);
    if (actions != null) {
        for (Action a : actions) {
            out.writeByte(a.code());
        }
    }
}
From source file:org.apache.hadoop.hive.ql.io.orc.OrcSplit.java
@Override
public void write(DataOutput out) throws IOException {
    // serialize path, offset, length using FileSplit
    super.write(out);
    int flags = (hasBase ? BASE_FLAG : 0) | (isOriginal ? ORIGINAL_FLAG : 0) | (hasFooter ? FOOTER_FLAG : 0)
            | (fileId != null ? HAS_FILEID_FLAG : 0);
    out.writeByte(flags);
    out.writeInt(deltas.size());
    for (AcidInputFormat.DeltaMetaData delta : deltas) {
        delta.write(out);
    }
    if (hasFooter) {
        // serialize FileMetaInfo fields
        Text.writeString(out, fileMetaInfo.compressionType);
        WritableUtils.writeVInt(out, fileMetaInfo.bufferSize);
        WritableUtils.writeVInt(out, fileMetaInfo.metadataSize);

        // serialize FileMetaInfo field footer
        ByteBuffer footerBuff = fileMetaInfo.footerBuffer;
        footerBuff.reset();

        // write length of buffer
        WritableUtils.writeVInt(out, footerBuff.limit() - footerBuff.position());
        out.write(footerBuff.array(), footerBuff.position(), footerBuff.limit() - footerBuff.position());
        WritableUtils.writeVInt(out, fileMetaInfo.writerVersion.getId());
    }
    if (fileId != null) {
        out.writeLong(fileId.longValue());
    }
}
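The OrcSplit example packs several booleans into a single flag byte before calling writeByte. A minimal, self-contained sketch of that bit-flag pattern; the flag names here are invented for illustration and are not the ORC constants:

import java.io.*;

class FlagByteExample {
    // Hypothetical single-bit flags, analogous to BASE_FLAG / ORIGINAL_FLAG / FOOTER_FLAG above.
    static final int FLAG_A = 1;      // 0b001
    static final int FLAG_B = 1 << 1; // 0b010
    static final int FLAG_C = 1 << 2; // 0b100

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buf)) {
            boolean a = true, b = false, c = true;
            int flags = (a ? FLAG_A : 0) | (b ? FLAG_B : 0) | (c ? FLAG_C : 0);
            out.writeByte(flags); // all three booleans stored in one byte
        }

        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()))) {
            int flags = in.readUnsignedByte();
            System.out.println("a=" + ((flags & FLAG_A) != 0)
                    + " b=" + ((flags & FLAG_B) != 0)
                    + " c=" + ((flags & FLAG_C) != 0));
        }
    }
}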
From source file:org.apache.hadoop.io.UTF8.java
private static void writeChars(DataOutput out, String s, int start, int length) throws IOException {
    final int end = start + length;
    for (int i = start; i < end; i++) {
        int code = s.charAt(i);
        if (code >= 0x01 && code <= 0x7F) {
            out.writeByte((byte) code);
        } else if (code <= 0x07FF) {
            out.writeByte((byte) (0xC0 | ((code >> 6) & 0x1F)));
            out.writeByte((byte) (0x80 | code & 0x3F));
        } else {
            out.writeByte((byte) (0xE0 | ((code >> 12) & 0x0F)));
            out.writeByte((byte) (0x80 | ((code >> 6) & 0x3F)));
            out.writeByte((byte) (0x80 | (code & 0x3F)));
        }
    }
}
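The byte sequences emitted above follow the modified UTF-8 scheme that DataOutput.writeUTF also uses (writeUTF additionally prefixes a two-byte length). A small sketch that re-encodes a BMP string with the same per-character logic and compares it against writeUTF's output with the length prefix stripped:

import java.io.*;
import java.util.Arrays;

class ModifiedUtf8Check {
    public static void main(String[] args) throws IOException {
        String s = "h\u00E9llo\u20AC"; // ASCII chars, a two-byte char, and a three-byte char (euro sign)

        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buf)) {
            out.writeUTF(s); // 2-byte length followed by modified UTF-8 bytes
        }
        byte[] withLength = buf.toByteArray();
        byte[] encoded = Arrays.copyOfRange(withLength, 2, withLength.length); // drop the length prefix

        // Re-encode the same string with the byte-level logic from writeChars above.
        ByteArrayOutputStream manual = new ByteArrayOutputStream();
        for (int i = 0; i < s.length(); i++) {
            int code = s.charAt(i);
            if (code >= 0x01 && code <= 0x7F) {
                manual.write(code);
            } else if (code <= 0x07FF) {
                manual.write(0xC0 | ((code >> 6) & 0x1F));
                manual.write(0x80 | (code & 0x3F));
            } else {
                manual.write(0xE0 | ((code >> 12) & 0x0F));
                manual.write(0x80 | ((code >> 6) & 0x3F));
                manual.write(0x80 | (code & 0x3F));
            }
        }
        System.out.println(Arrays.equals(encoded, manual.toByteArray())); // true for this string
    }
}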
From source file:org.apache.hama.graph.GraphJobMessage.java
@Override
public void write(DataOutput out) throws IOException {
    out.writeByte(this.flag);
    if (isVertexMessage()) {
        // we don't need to write the classes because the other side has the
        // same classes for the two entities.
        vertexId.write(out);
        vertexValue.write(out);
    } else if (isMapMessage()) {
        map.write(out);
    } else if (isPartitioningMessage()) {
        //LOG.info("vertex.getVertexID() : " + vertex.getVertexID().toString());
        vertex.getVertexID().write(out);
        //LOG.info("vertex.getValue() : " + vertex.getValue());
        if (vertex.getValue() != null) {
            out.writeBoolean(true);
            vertex.getValue().write(out);
        } else {
            out.writeBoolean(false);
        }
        List<?> outEdges = vertex.getEdges();
        out.writeInt(outEdges.size());
        for (Object e : outEdges) {
            Edge<?, ?> edge = (Edge<?, ?>) e;
            edge.getDestinationVertexID().write(out);
            if (edge.getValue() != null) {
                out.writeBoolean(true);
                edge.getValue().write(out);
            } else {
                out.writeBoolean(false);
            }
        }
    } else if (isVerticesSizeMessage()) {
        vertices_size.write(out);
    } else if (isBoundaryVertexSizeMessage()) {
        boundaryVertex_size.write(out);
    } else {
        vertexId.write(out);
    }
}
From source file:org.apache.hawq.pxf.service.io.GPDBWritable.java
@Override
public void write(DataOutput out) throws IOException {
    int numCol = colType.length;
    boolean[] nullBits = new boolean[numCol];
    int[] colLength = new int[numCol];
    byte[] enumType = new byte[numCol];
    int[] padLength = new int[numCol];
    byte[] padbytes = new byte[8];

    /*
     * Compute the total payload and header length
     * header = total length (4 byte), Version (2 byte), Error (1 byte), #col (2 byte)
     * col type array = #col * 1 byte
     * null bit array = ceil(#col/8)
     */
    int datlen = 4 + 2 + 1 + 2;
    datlen += numCol;
    datlen += getNullByteArraySize(numCol);

    for (int i = 0; i < numCol; i++) {
        /* Get the enum type */
        DBType coldbtype;
        switch (DataType.get(colType[i])) {
        case BIGINT:
            coldbtype = DBType.BIGINT;
            break;
        case BOOLEAN:
            coldbtype = DBType.BOOLEAN;
            break;
        case FLOAT8:
            coldbtype = DBType.FLOAT8;
            break;
        case INTEGER:
            coldbtype = DBType.INTEGER;
            break;
        case REAL:
            coldbtype = DBType.REAL;
            break;
        case SMALLINT:
            coldbtype = DBType.SMALLINT;
            break;
        case BYTEA:
            coldbtype = DBType.BYTEA;
            break;
        default:
            coldbtype = DBType.TEXT;
        }
        enumType[i] = (byte) (coldbtype.ordinal());

        /* Get the actual value, and set the null bit */
        if (colValue[i] == null) {
            nullBits[i] = true;
            colLength[i] = 0;
        } else {
            nullBits[i] = false;

            /*
             * For fixed length type, we get the fixed length.
             * For var len binary format, the length is in the col value.
             * For text format, we must convert encoding first.
             */
            if (!coldbtype.isVarLength()) {
                colLength[i] = coldbtype.getTypeLength();
            } else if (!isTextForm(colType[i])) {
                colLength[i] = ((byte[]) colValue[i]).length;
            } else {
                colLength[i] = ((String) colValue[i]).getBytes(CHARSET).length;
            }

            /* calculate and add the type alignment padding */
            padLength[i] = roundUpAlignment(datlen, coldbtype.getAlignment()) - datlen;
            datlen += padLength[i];

            /* for variable length type, we add a 4 byte length header */
            if (coldbtype.isVarLength()) {
                datlen += 4;
            }
        }
        datlen += colLength[i];
    }

    /* Add the final alignment padding for the next record */
    int endpadding = roundUpAlignment(datlen, 8) - datlen;
    datlen += endpadding;

    /* Construct the packet header */
    out.writeInt(datlen);
    out.writeShort(VERSION);
    out.writeByte(errorFlag);
    out.writeShort(numCol);

    /* Write col type */
    for (int i = 0; i < numCol; i++) {
        out.writeByte(enumType[i]);
    }

    /* Nullness */
    byte[] nullBytes = boolArrayToByteArray(nullBits);
    out.write(nullBytes);

    /* Column Value */
    for (int i = 0; i < numCol; i++) {
        if (!nullBits[i]) {
            /* Pad the alignment byte first */
            if (padLength[i] > 0) {
                out.write(padbytes, 0, padLength[i]);
            }

            /* Now, write the actual column value */
            switch (DataType.get(colType[i])) {
            case BIGINT:
                out.writeLong(((Long) colValue[i]));
                break;
            case BOOLEAN:
                out.writeBoolean(((Boolean) colValue[i]));
                break;
            case FLOAT8:
                out.writeDouble(((Double) colValue[i]));
                break;
            case INTEGER:
                out.writeInt(((Integer) colValue[i]));
                break;
            case REAL:
                out.writeFloat(((Float) colValue[i]));
                break;
            case SMALLINT:
                out.writeShort(((Short) colValue[i]));
                break;
            /* For BYTEA format, add 4byte length header at the beginning */
            case BYTEA:
                out.writeInt(colLength[i]);
                out.write((byte[]) colValue[i]);
                break;
            /* For text format, add 4byte length header. string is already '\0' terminated */
            default: {
                out.writeInt(colLength[i]);
                byte[] data = ((String) colValue[i]).getBytes(CHARSET);
                out.write(data);
                break;
            }
            }
        }
    }

    /* End padding */
    out.write(padbytes, 0, endpadding);
}
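The length bookkeeping above relies on roundUpAlignment, which, judging from how it is called, rounds an offset up to the next multiple of a type's alignment. A hypothetical stand-alone version of that helper, written here only to illustrate the padding arithmetic (the real HAWQ implementation may differ):

class AlignmentSketch {
    // Hypothetical helper mirroring how roundUpAlignment is used above:
    // round offset up to the next multiple of alignment.
    static int roundUpAlignment(int offset, int alignment) {
        return ((offset + alignment - 1) / alignment) * alignment;
    }

    public static void main(String[] args) {
        int datlen = 13;
        int padLength = roundUpAlignment(datlen, 8) - datlen;
        System.out.println(padLength); // 3 padding bytes bring the offset from 13 to 16
    }
}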
From source file:org.apache.isis.objectstore.nosql.db.file.server.FileServer.java
private void startSyncing() {
    final String syncHost = config.getString("fileserver.sync-host", DEFAULT_HOST);
    final int syncPort = config.getInt("fileserver.sync-port", DEFAULT_SYNC_PORT);
    final int connectionTimeout = config.getInt("fileserver.connection.timeout", 5000);
    LOG.info("preparing to sync to secondary server on " + syncHost + " port " + syncPort);

    final InetAddress address;
    try {
        address = InetAddress.getByName(syncHost);
    } catch (final UnknownHostException e) {
        LOG.error("Unknown host " + syncHost, e);
        System.exit(0);
        return;
    }

    while (awaitConnections) {
        Socket socket = null;
        try {
            socket = new Socket(address, syncPort);
            LOG.info("sync connected to " + socket.getInetAddress().getHostAddress() + " port "
                    + socket.getLocalPort());

            final CRC32 crc32 = new CRC32();
            final DataOutput output = new DataOutputStream(
                    new CheckedOutputStream(socket.getOutputStream(), crc32));
            final DataInput input = new DataInputStream(socket.getInputStream());
            output.writeByte(INIT);
            long logId = input.readLong();
            do {
                final long nextLogId = logId + 1;
                final File file = Util.logFile(nextLogId);
                if (file.exists() && server.getLogger().isWritten(nextLogId)) {
                    logId++;

                    output.writeByte(RECOVERY_LOG);
                    crc32.reset();
                    output.writeLong(logId);

                    LOG.info("sending recovery file: " + file.getName());
                    final BufferedInputStream fileInput = new BufferedInputStream(new FileInputStream(file));
                    final byte[] buffer = new byte[8092];
                    int read;
                    while ((read = fileInput.read(buffer)) > 0) {
                        output.writeInt(read);
                        output.write(buffer, 0, read);
                    }
                    output.writeInt(0);
                    output.writeLong(crc32.getValue());
                }
                try {
                    Thread.sleep(300);
                } catch (final InterruptedException ignore) {
                }
                while (isQuiescent) {
                    try {
                        Thread.sleep(300);
                    } catch (final InterruptedException ignore) {
                    }
                }
            } while (awaitConnections);
        } catch (final ConnectException e) {
            LOG.warn("not yet connected to secondary server at " + syncHost + " port " + syncPort);
            try {
                Thread.sleep(connectionTimeout);
            } catch (final InterruptedException ignore) {
            }
        } catch (final IOException e) {
            LOG.error("start failure - networking not set up for " + syncHost, e);
            try {
                Thread.sleep(300);
            } catch (final InterruptedException ignore) {
            }
        } catch (final RuntimeException e) {
            LOG.error("start failure", e);
            try {
                Thread.sleep(300);
            } catch (final InterruptedException ignore) {
            }
        }
    }
}
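The FileServer example wraps the socket stream in a CheckedOutputStream so a running CRC32 can be appended after each recovery log. A minimal, self-contained sketch of that checksum-then-verify pattern over an in-memory stream; the tag byte and framing here are invented for illustration and are not the FileServer protocol:

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.zip.*;

class ChecksummedWriteExample {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        CRC32 crc = new CRC32();
        DataOutputStream out = new DataOutputStream(new CheckedOutputStream(buf, crc));

        // Write a tag byte and a length-prefixed payload, then append the CRC of those bytes.
        crc.reset();
        out.writeByte(0x01); // hypothetical message tag
        byte[] payload = "some log data".getBytes(StandardCharsets.UTF_8);
        out.writeInt(payload.length);
        out.write(payload);
        out.writeInt(0);               // end-of-chunks marker, as in the example above
        out.writeLong(crc.getValue()); // checksum trailer
        out.flush();

        // Reader side: recompute the CRC over everything before the trailer and compare.
        CRC32 readCrc = new CRC32();
        DataInputStream in = new DataInputStream(
                new CheckedInputStream(new ByteArrayInputStream(buf.toByteArray()), readCrc));
        in.readByte();
        byte[] data = new byte[in.readInt()];
        in.readFully(data);
        in.readInt();                          // end-of-chunks marker
        long expected = readCrc.getValue();    // CRC over all bytes read so far
        long actual = in.readLong();           // trailer written by the sender
        System.out.println(expected == actual); // true
    }
}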