List of usage examples for java.io.DataOutput.write

Prototype: void write(byte[] b) throws IOException;
Parameter: b — the byte array whose bytes are written to the stream.
. From source file:org.apache.hadoop.hbase.io.HbaseObjectWritable.java
/**
 * Write a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding to {@code out}, each value prefixed with a class code so the
 * matching reader can reconstruct it.
 *
 * @param out           destination stream
 * @param instance      value to serialize; {@code null} is encoded as a
 *                      {@code NullInstance} placeholder
 * @param declaredClass declared (static) type of {@code instance}; selects
 *                      the encoding branch below
 * @param conf          configuration threaded through to nested writes
 * @throws IOException if the value cannot be encoded or the stream fails
 */
@SuppressWarnings("unchecked")
public static void writeObject(DataOutput out, Object instance, Class declaredClass, Configuration conf)
        throws IOException {
    Object instanceObj = instance;
    Class declClass = declaredClass;
    if (instanceObj == null) { // null: substitute a typed placeholder
        instanceObj = new NullInstance(declClass, conf);
        declClass = Writable.class;
    }
    // Every value is prefixed with the code of its declared class.
    writeClassCode(out, declClass);
    if (declClass.isArray()) { // array
        // If bytearray, just dump it out -- avoid the recursion and
        // byte-at-a-time we were previously doing.
        if (declClass.equals(byte[].class)) {
            Bytes.writeByteArray(out, (byte[]) instanceObj);
        } else if (declClass.equals(Result[].class)) {
            Result.writeArray(out, (Result[]) instanceObj);
        } else {
            // if it is a Generic array, write the element's type
            if (getClassCode(declaredClass) == GENERIC_ARRAY_CODE) {
                Class<?> componentType = declaredClass.getComponentType();
                writeClass(out, componentType);
            }
            // Length, then each element recursively with its runtime class.
            // NOTE(review): a null array element would NPE on item.getClass().
            int length = Array.getLength(instanceObj);
            out.writeInt(length);
            for (int i = 0; i < length; i++) {
                Object item = Array.get(instanceObj, i);
                writeObject(out, item, item.getClass(), conf);
            }
        }
    } else if (List.class.isAssignableFrom(declClass)) {
        // Lists: size, then elements; null elements are declared Writable so
        // they round-trip through the NullInstance path above.
        List list = (List) instanceObj;
        int length = list.size();
        out.writeInt(length);
        for (int i = 0; i < length; i++) {
            Object elem = list.get(i);
            writeObject(out, elem, elem == null ? Writable.class : elem.getClass(), conf);
        }
    } else if (declClass == String.class) { // String
        Text.writeString(out, (String) instanceObj);
    } else if (declClass.isPrimitive()) { // primitive type
        if (declClass == Boolean.TYPE) { // boolean
            out.writeBoolean(((Boolean) instanceObj).booleanValue());
        } else if (declClass == Character.TYPE) { // char
            out.writeChar(((Character) instanceObj).charValue());
        } else if (declClass == Byte.TYPE) { // byte
            out.writeByte(((Byte) instanceObj).byteValue());
        } else if (declClass == Short.TYPE) { // short
            out.writeShort(((Short) instanceObj).shortValue());
        } else if (declClass == Integer.TYPE) { // int
            out.writeInt(((Integer) instanceObj).intValue());
        } else if (declClass == Long.TYPE) { // long
            out.writeLong(((Long) instanceObj).longValue());
        } else if (declClass == Float.TYPE) { // float
            out.writeFloat(((Float) instanceObj).floatValue());
        } else if (declClass == Double.TYPE) { // double
            out.writeDouble(((Double) instanceObj).doubleValue());
        } else if (declClass == Void.TYPE) { // void: nothing to write
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declClass);
        }
    } else if (declClass.isEnum()) { // enum: encoded by constant name
        Text.writeString(out, ((Enum) instanceObj).name());
    } else if (Message.class.isAssignableFrom(declaredClass)) {
        // Protobuf message: runtime class name, then delimited message bytes.
        // NOTE(review): uses 'instance' rather than 'instanceObj', so a null
        // Message would NPE here -- confirm callers never pass one.
        Text.writeString(out, instanceObj.getClass().getName());
        ((Message) instance).writeDelimitedTo(DataOutputOutputStream.constructOutputStream(out));
    } else if (Writable.class.isAssignableFrom(declClass)) { // Writable
        // Prefix with the runtime class (coded if known, else by name), then
        // let the instance serialize itself.
        Class<?> c = instanceObj.getClass();
        Integer code = CLASS_TO_CODE.get(c);
        if (code == null) {
            out.writeByte(NOT_ENCODED);
            Text.writeString(out, c.getName());
        } else {
            writeClassCode(out, c);
        }
        ((Writable) instanceObj).write(out);
    } else if (Serializable.class.isAssignableFrom(declClass)) {
        // Fallback: Java serialization, length-prefixed.
        Class<?> c = instanceObj.getClass();
        Integer code = CLASS_TO_CODE.get(c);
        if (code == null) {
            out.writeByte(NOT_ENCODED);
            Text.writeString(out, c.getName());
        } else {
            writeClassCode(out, c);
        }
        ByteArrayOutputStream bos = null;
        ObjectOutputStream oos = null;
        try {
            bos = new ByteArrayOutputStream();
            oos = new ObjectOutputStream(bos);
            oos.writeObject(instanceObj);
            byte[] value = bos.toByteArray();
            out.writeInt(value.length);
            out.write(value);
        } finally {
            if (bos != null)
                bos.close();
            if (oos != null)
                oos.close();
        }
    } else {
        throw new IOException("Can't write: " + instanceObj + " as " + declClass);
    }
}
From source file:org.apache.hadoop.hbase.io.hfile.TestHFile.java
/**
 * Appends {@code n} meta blocks named "HFileMeta0".."HFileMeta{n-1}" to the
 * writer; each block's payload is the string "something to test" + index.
 *
 * @param writer destination HFile writer
 * @param n      number of meta blocks to append
 */
private void writeNumMetablocks(Writer writer, int n) {
    for (int i = 0; i < n; i++) {
        writer.appendMetaBlock("HFileMeta" + i, new Writable() {
            private int val;

            public Writable setVal(int val) {
                this.val = val;
                return this;
            }

            @Override
            public void write(DataOutput out) throws IOException {
                // Explicit charset: the original bare getBytes() used the
                // platform default encoding, making the test data
                // machine-dependent. UnsupportedEncodingException is an
                // IOException, so the signature is unchanged.
                out.write(("something to test" + val).getBytes("UTF-8"));
            }

            @Override
            public void readFields(DataInput in) throws IOException {
            }
        }.setVal(i));
    }
}
From source file:org.apache.hadoop.hbase.security.access.HbaseObjectWritableFor96Migration.java
/**
 * Write a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding to {@code out}, each value prefixed with a class code so the
 * matching reader can reconstruct it (96-migration variant: byte[] arrays,
 * generic arrays, lists, primitives, enums, Messages, Writables,
 * Serializables, and Scans).
 *
 * @param out           destination stream
 * @param instance      value to serialize; {@code null} is encoded as a
 *                      {@code NullInstance} placeholder
 * @param declaredClass declared (static) type of {@code instance}; selects
 *                      the encoding branch below
 * @param conf          configuration threaded through to nested writes
 * @throws IOException if the value cannot be encoded or the stream fails
 */
@SuppressWarnings("unchecked")
static void writeObject(DataOutput out, Object instance, Class declaredClass, Configuration conf)
        throws IOException {
    Object instanceObj = instance;
    Class declClass = declaredClass;
    if (instanceObj == null) { // null: substitute a typed placeholder
        instanceObj = new NullInstance(declClass, conf);
        declClass = Writable.class;
    }
    // Every value is prefixed with the code of its declared class.
    writeClassCode(out, declClass);
    if (declClass.isArray()) { // array
        // If bytearray, just dump it out -- avoid the recursion and
        // byte-at-a-time we were previously doing.
        if (declClass.equals(byte[].class)) {
            Bytes.writeByteArray(out, (byte[]) instanceObj);
        } else {
            // if it is a Generic array, write the element's type
            if (getClassCode(declaredClass) == GENERIC_ARRAY_CODE) {
                Class<?> componentType = declaredClass.getComponentType();
                writeClass(out, componentType);
            }
            // Length, then each element recursively with its runtime class.
            // NOTE(review): a null array element would NPE on item.getClass().
            int length = Array.getLength(instanceObj);
            out.writeInt(length);
            for (int i = 0; i < length; i++) {
                Object item = Array.get(instanceObj, i);
                writeObject(out, item, item.getClass(), conf);
            }
        }
    } else if (List.class.isAssignableFrom(declClass)) {
        // Lists: size, then elements; null elements are declared Writable so
        // they round-trip through the NullInstance path above.
        List list = (List) instanceObj;
        int length = list.size();
        out.writeInt(length);
        for (int i = 0; i < length; i++) {
            Object elem = list.get(i);
            writeObject(out, elem, elem == null ? Writable.class : elem.getClass(), conf);
        }
    } else if (declClass == String.class) { // String
        Text.writeString(out, (String) instanceObj);
    } else if (declClass.isPrimitive()) { // primitive type
        if (declClass == Boolean.TYPE) { // boolean
            out.writeBoolean(((Boolean) instanceObj).booleanValue());
        } else if (declClass == Character.TYPE) { // char
            out.writeChar(((Character) instanceObj).charValue());
        } else if (declClass == Byte.TYPE) { // byte
            out.writeByte(((Byte) instanceObj).byteValue());
        } else if (declClass == Short.TYPE) { // short
            out.writeShort(((Short) instanceObj).shortValue());
        } else if (declClass == Integer.TYPE) { // int
            out.writeInt(((Integer) instanceObj).intValue());
        } else if (declClass == Long.TYPE) { // long
            out.writeLong(((Long) instanceObj).longValue());
        } else if (declClass == Float.TYPE) { // float
            out.writeFloat(((Float) instanceObj).floatValue());
        } else if (declClass == Double.TYPE) { // double
            out.writeDouble(((Double) instanceObj).doubleValue());
        } else if (declClass == Void.TYPE) { // void: nothing to write
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declClass);
        }
    } else if (declClass.isEnum()) { // enum: encoded by constant name
        Text.writeString(out, ((Enum) instanceObj).name());
    } else if (Message.class.isAssignableFrom(declaredClass)) {
        // Protobuf message: runtime class name, then delimited message bytes.
        // NOTE(review): uses 'instance' rather than 'instanceObj', so a null
        // Message would NPE here -- confirm callers never pass one.
        Text.writeString(out, instanceObj.getClass().getName());
        ((Message) instance).writeDelimitedTo(DataOutputOutputStream.constructOutputStream(out));
    } else if (Writable.class.isAssignableFrom(declClass)) { // Writable
        // Prefix with the runtime class (coded if known, else by name), then
        // let the instance serialize itself.
        Class<?> c = instanceObj.getClass();
        Integer code = CLASS_TO_CODE.get(c);
        if (code == null) {
            out.writeByte(NOT_ENCODED);
            Text.writeString(out, c.getName());
        } else {
            writeClassCode(out, c);
        }
        ((Writable) instanceObj).write(out);
    } else if (Serializable.class.isAssignableFrom(declClass)) {
        // Fallback: Java serialization, length-prefixed.
        Class<?> c = instanceObj.getClass();
        Integer code = CLASS_TO_CODE.get(c);
        if (code == null) {
            out.writeByte(NOT_ENCODED);
            Text.writeString(out, c.getName());
        } else {
            writeClassCode(out, c);
        }
        ByteArrayOutputStream bos = null;
        ObjectOutputStream oos = null;
        try {
            bos = new ByteArrayOutputStream();
            oos = new ObjectOutputStream(bos);
            oos.writeObject(instanceObj);
            byte[] value = bos.toByteArray();
            out.writeInt(value.length);
            out.write(value);
        } finally {
            if (bos != null)
                bos.close();
            if (oos != null)
                oos.close();
        }
    } else if (Scan.class.isAssignableFrom(declClass)) {
        // Scan: converted to its protobuf form, length-prefixed.
        Scan scan = (Scan) instanceObj;
        byte[] scanBytes = ProtobufUtil.toScan(scan).toByteArray();
        out.writeInt(scanBytes.length);
        out.write(scanBytes);
    } else {
        throw new IOException("Can't write: " + instanceObj + " as " + declClass);
    }
}
From source file:org.apache.hadoop.hdfs.server.namenode.FSImageSerialization.java
public static void writeBytes(byte[] data, DataOutput out) throws IOException { out.writeShort(data.length);//from w w w .jav a 2 s.c o m out.write(data); }
From source file:org.apache.hadoop.hive.ql.exec.ACLTask.java
/**
 * Writes the group names matching the pattern in {@code sGD} (all groups
 * when no pattern is set), sorted and terminator-separated, to the
 * descriptor's result file.
 *
 * @return 0 on success, 1 on I/O failure
 * @throws HiveException on any non-I/O error
 */
private int showGroup(Hive db, showGroupsDesc sGD) throws HiveException {
    List<String> groups = null;
    if (sGD.getPattern() != null) {
        LOG.info("pattern: " + sGD.getPattern());
        groups = db.getGroups(sGD.getPattern());
        LOG.info("results : " + groups.size());
    } else {
        groups = db.getGroups(".*");
    }
    try {
        FileSystem fs = sGD.getResFile().getFileSystem(conf);
        // fs.create already returns FSDataOutputStream; no cast needed.
        FSDataOutputStream outStream = fs.create(sGD.getResFile());
        // Close in finally: the original closed only on the success path,
        // leaking the stream whenever a write threw.
        try {
            SortedSet<String> sortedTbls = new TreeSet<String>(groups);
            Iterator<String> iterTbls = sortedTbls.iterator();
            while (iterTbls.hasNext()) {
                outStream.writeBytes(iterTbls.next());
                outStream.write(terminator);
            }
        } finally {
            outStream.close();
        }
    } catch (FileNotFoundException e) {
        LOG.warn("show groups: " + StringUtils.stringifyException(e));
        if (SessionState.get() != null)
            SessionState.get().ssLog("show groups: " + StringUtils.stringifyException(e));
        return 1;
    } catch (IOException e) {
        LOG.warn("show groups: " + StringUtils.stringifyException(e));
        if (SessionState.get() != null)
            SessionState.get().ssLog("show groups: " + StringUtils.stringifyException(e));
        return 1;
    } catch (Exception e) {
        throw new HiveException(e.toString());
    }
    return 0;
}
From source file:org.apache.hadoop.hive.ql.exec.ACLTask.java
/**
 * Writes the roles visible to the requesting principal, sorted and
 * terminator-separated under an "ALL roles in TDW:" header, to the
 * descriptor's temp file. Returns immediately when a specific user is set.
 *
 * @return 0 on success (or when a user filter short-circuits), 1 on I/O failure
 * @throws HiveException on any non-I/O error
 */
private int showRoles(Hive db, showRolesDesc showRolesD) throws HiveException {
    List<String> roles;
    if (showRolesD.getUser() == null) {
        roles = db.showRoles(showRolesD.getWho());
    } else {
        return 0;
    }
    try {
        FileSystem fs = showRolesD.getTmpFile().getFileSystem(conf);
        // fs.create already returns FSDataOutputStream; no cast needed.
        FSDataOutputStream outStream = fs.create(showRolesD.getTmpFile());
        LOG.info("show roles tmp file:" + showRolesD.getTmpFile().toString());
        // Close in finally: the original closed only on the success path,
        // leaking the stream whenever a write threw.
        try {
            SortedSet<String> sortedRoles = new TreeSet<String>(roles);
            Iterator<String> iterRoles = sortedRoles.iterator();
            outStream.writeBytes("ALL roles in TDW:");
            outStream.write(terminator);
            while (iterRoles.hasNext()) {
                outStream.writeBytes(iterRoles.next());
                outStream.write(terminator);
            }
        } finally {
            outStream.close();
        }
    } catch (FileNotFoundException e) {
        LOG.warn("show roles: " + StringUtils.stringifyException(e));
        return 1;
    } catch (IOException e) {
        LOG.warn("show roles: " + StringUtils.stringifyException(e));
        return 1;
    } catch (Exception e) {
        throw new HiveException(e.toString());
    }
    LOG.info("show roles OK");
    return 0;
}
From source file:org.apache.hadoop.hive.ql.exec.ACLTask.java
private int showUsers(Hive db, showUsersDesc showUsersD) throws HiveException { List<String> users = db.showUsers(showUsersD.getWho()); try {//ww w . j av a2 s .co m FileSystem fs = showUsersD.getTmpFile().getFileSystem(conf); DataOutput outStream = (DataOutput) fs.create(showUsersD.getTmpFile()); SortedSet<String> sortedUsers = new TreeSet<String>(users); Iterator<String> iterUsers = sortedUsers.iterator(); outStream.writeBytes("All users in TDW:"); outStream.write(terminator); while (iterUsers.hasNext()) { outStream.writeBytes(iterUsers.next()); outStream.write(terminator); } ((FSDataOutputStream) outStream).close(); } catch (FileNotFoundException e) { LOG.warn("show users: " + StringUtils.stringifyException(e)); return 1; } catch (IOException e) { LOG.warn("show users: " + StringUtils.stringifyException(e)); return 1; } catch (Exception e) { throw new HiveException(e.toString()); } LOG.info("show users OK"); return 0; }
From source file:org.apache.hadoop.hive.ql.exec.ACLTask.java
/**
 * Writes the grants held by the given principal, one per line
 * (terminator-separated, in the order returned by the metastore), to the
 * descriptor's temp file.
 *
 * @return 0 on success, 1 on I/O failure
 * @throws HiveException on any non-I/O error
 */
private int showGrants(Hive db, showGrantsDesc showGrantsD) throws HiveException {
    List<String> grants = db.showGrants(showGrantsD.getWho(), showGrantsD.getUser());
    try {
        FileSystem fs = showGrantsD.getTmpFile().getFileSystem(conf);
        // fs.create already returns FSDataOutputStream; no cast needed.
        FSDataOutputStream outStream = fs.create(showGrantsD.getTmpFile());
        // Close in finally: the original closed only on the success path,
        // leaking the stream whenever a write threw.
        try {
            Iterator<String> iterGrants = grants.iterator();
            while (iterGrants.hasNext()) {
                outStream.writeBytes(iterGrants.next());
                outStream.write(terminator);
            }
        } finally {
            outStream.close();
        }
    } catch (FileNotFoundException e) {
        LOG.warn("show grants: " + StringUtils.stringifyException(e));
        return 1;
    } catch (IOException e) {
        LOG.warn("show grants: " + StringUtils.stringifyException(e));
        return 1;
    } catch (Exception e) {
        throw new HiveException(e.toString());
    }
    return 0;
}
From source file:org.apache.hadoop.hive.ql.exec.DCLTask.java
private int showGrants(ShowGrantsDesc showGntsDesc) throws HiveException, AuthorizeException { String userName = showGntsDesc.getUser(); if (userName == null) { userName = SessionState.get().getUserName(); }//w w w . java 2s.c o m User user = db.getUser(userName); try { if (user == null) { FileSystem fs = showGntsDesc.getResFile().getFileSystem(conf); DataOutput outStream = (DataOutput) fs.open(showGntsDesc.getResFile()); String errMsg = "User " + userName + " does not exist"; outStream.write(errMsg.getBytes("UTF-8")); ((FSDataOutputStream) outStream).close(); return 0; } } catch (FileNotFoundException e) { LOG.info("show grants: " + StringUtils.stringifyException(e)); return 1; } catch (IOException e) { LOG.info("show grants: " + StringUtils.stringifyException(e)); return 1; } try { LOG.info("DCLTask: got grant privilege for " + user.getName()); FileSystem fs = showGntsDesc.getResFile().getFileSystem(conf); DataOutput outStream = (DataOutput) fs.create(showGntsDesc.getResFile()); List<AuthorizeEntry> entries = SessionState.get().getAuthorizer().getAllPrivileges(userName); if (entries == null || entries.isEmpty()) { return 0; } for (AuthorizeEntry e : entries) { switch (e.getPrivLevel()) { case GLOBAL_LEVEL: outStream.writeBytes("Global grants: "); break; case DATABASE_LEVEL: outStream.writeBytes(String.format("Grants on database %s:", e.getDb().getName())); break; case TABLE_LEVEL: outStream.writeBytes(String.format("Grants on table %s.%s:", e.getTable().getDbName(), e.getTable().getTableName())); break; case COLUMN_LEVEL: String fields = ""; if (e.getFields() != null && !e.getFields().isEmpty()) { for (FieldSchema f : e.getFields()) { fields += f.getName() + ","; } } else { fields = "<null>"; } outStream.writeBytes(String.format("Grants on column %s.%s.[%s]:", e.getTable().getDbName(), e.getTable().getTableName(), fields)); break; default: } for (Privilege p : e.getRequiredPrivs()) { outStream.writeBytes(p.toString() + " "); } outStream.write(terminator); } 
LOG.info("DCLTask: written data for " + user.getName()); ((FSDataOutputStream) outStream).close(); } catch (FileNotFoundException e) { LOG.info("show grants: " + StringUtils.stringifyException(e)); return 1; } catch (IOException e) { LOG.info("show grants: " + StringUtils.stringifyException(e)); return 1; } catch (Exception e) { throw new HiveException(e.toString()); } return 0; }
From source file:org.apache.hadoop.hive.serde2.io.BigDecimalWritable.java
/**
 * Serializes this value to {@code out}: the scale as a VInt, then the length
 * of {@code internalStorage} as a VInt, then its raw bytes.
 * internalStorage is presumably the unscaled value's byte representation --
 * confirm against the matching readFields.
 */
@Override
public void write(DataOutput out) throws IOException {
    WritableUtils.writeVInt(out, scale);
    WritableUtils.writeVInt(out, internalStorage.length);
    out.write(internalStorage);
}