List of usage examples for java.io.DataInput.readInt()
int readInt() throws IOException;
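readInt() reads four bytes and assembles them into an int, high byte first (big-endian). A minimal standalone sketch of direct usage, reading the length-prefixed framing that most of the examples below rely on (the file name and framing are assumptions for illustration):

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class ReadIntExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical framing: a 4-byte big-endian length, then that many payload bytes.
        try (DataInputStream in = new DataInputStream(new FileInputStream("payload.bin"))) {
            int length = in.readInt();   // consumes exactly 4 bytes
            byte[] payload = new byte[length];
            in.readFully(payload);       // blocks until the buffer is filled or throws EOFException
        }
    }
}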
From source file:com.bigdata.dastor.utils.FBUtilities.java
public static void deserialize(TDeserializer deserializer, TBase struct, DataInput in) throws IOException {
    assert deserializer != null;
    assert struct != null;
    assert in != null;
    byte[] bytes = new byte[in.readInt()];
    in.readFully(bytes);
    try {
        deserializer.deserialize(struct, bytes);
    } catch (TException ex) {
        throw new IOException(ex);
    }
}
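The example reads a 4-byte length and then exactly that many Thrift-encoded bytes. The matching writer is not shown; a hedged sketch of what it could look like, assuming Thrift's TSerializer.serialize(TBase) (which returns the encoded bytes) and a hypothetical helper name:

// Hypothetical counterpart: writes the struct as a length prefix followed by its Thrift encoding.
public static void serialize(TSerializer serializer, TBase struct, DataOutput out) throws IOException {
    try {
        byte[] bytes = serializer.serialize(struct);
        out.writeInt(bytes.length);
        out.write(bytes);
    } catch (TException ex) {
        throw new IOException(ex);
    }
}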
From source file:mobi.hsz.idea.gitignore.indexing.IgnoreEntryOccurrence.java
/**
 * Static helper to read {@link IgnoreEntryOccurrence} from the input stream.
 *
 * @param in input stream
 * @return read {@link IgnoreEntryOccurrence}
 */
@Nullable
public static synchronized IgnoreEntryOccurrence deserialize(@NotNull DataInput in) {
    try {
        String path = in.readUTF();
        if (StringUtils.isEmpty(path)) {
            return null;
        }
        VirtualFile file = LocalFileSystem.getInstance().findFileByPath(path);
        if (file == null || !file.exists() || file.isDirectory()) {
            return null;
        }
        IgnoreEntryOccurrence entry = new IgnoreEntryOccurrence(file);
        int size = in.readInt();
        for (int i = 0; i < size; i++) {
            Pattern pattern = Pattern.compile(in.readUTF());
            boolean isNegated = in.readBoolean();
            entry.add(pattern, isNegated);
        }
        return entry;
    } catch (IOException e) {
        return null;
    }
}
From source file:com.aliyun.odps.io.TupleReaderWriter.java
/**
 * Reads a Tuple from the input stream.
 *
 * @param in
 *          stream containing the serialized Tuple fields
 * @param t
 *          Tuple to populate with the fields that are read
 * @throws IOException
 *          if the stream does not start with a Tuple marker
 */
public static void readTuple(DataInput in, Tuple t) throws IOException {
    // Make sure it's a tuple.
    byte b = in.readByte();
    if (b != TUPLE) {
        String msg = "Unexpected data while reading tuple from binary file.";
        throw new IOException(msg);
    }
    // Read the number of fields
    int sz = in.readInt();
    for (int i = 0; i < sz; i++) {
        byte type = in.readByte();
        t.append(readDatum(in, type));
    }
}
From source file:org.apache.pig.data.DataReaderWriter.java
public static Object readDatum(DataInput in, byte type) throws IOException, ExecException {
    switch (type) {
    case DataType.TUPLE:
        return bytesToTuple(in);
    case DataType.BAG:
        return bytesToBag(in);
    case DataType.MAP:
        return bytesToMap(in);
    case DataType.INTERNALMAP:
        return bytesToInternalMap(in);
    case DataType.INTEGER:
        return Integer.valueOf(in.readInt());
    case DataType.LONG:
        return Long.valueOf(in.readLong());
    case DataType.FLOAT:
        return Float.valueOf(in.readFloat());
    case DataType.DOUBLE:
        return Double.valueOf(in.readDouble());
    case DataType.BOOLEAN:
        return Boolean.valueOf(in.readBoolean());
    case DataType.BYTE:
        return Byte.valueOf(in.readByte());
    case DataType.BYTEARRAY: {
        int size = in.readInt();
        byte[] ba = new byte[size];
        in.readFully(ba);
        return new DataByteArray(ba);
    }
    case DataType.BIGCHARARRAY:
        return bytesToBigCharArray(in);
    case DataType.CHARARRAY:
        return bytesToCharArray(in);
    case DataType.GENERIC_WRITABLECOMPARABLE:
        return bytesToWritable(in);
    case DataType.NULL:
        return null;
    default:
        throw new RuntimeException("Unexpected data type " + type + " found in stream.");
    }
}
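readDatum expects its caller to have consumed the one-byte type tag already. A minimal usage sketch (the wrapper class and byte-array source are assumptions):

import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.data.DataReaderWriter;

public class ReadDatumExample {
    // Hypothetical caller: each datum is framed as a one-byte type tag followed by its payload.
    public static Object readNext(byte[] encoded) throws IOException, ExecException {
        DataInput in = new DataInputStream(new ByteArrayInputStream(encoded));
        byte type = in.readByte();
        return DataReaderWriter.readDatum(in, type);
    }
}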
From source file:org.apache.hadoop.hdfs.server.namenode.IngestLocal.java
static private DatanodeDescriptor[] readDatanodeDescriptorArray(DataInput in) throws IOException {
    DatanodeDescriptor[] locations = new DatanodeDescriptor[in.readInt()];
    for (int i = 0; i < locations.length; i++) {
        locations[i] = new DatanodeDescriptor();
        locations[i].readFieldsFromFSEditLog(in);
    }
    return locations;
}
From source file:com.krawler.common.util.ByteUtil.java
public static String readUTF8(DataInput in) throws IOException {
    int len = in.readInt();
    if (len > MAX_STRING_LEN) {
        throw new IOException("String length " + len + " is too long in ByteUtil.readUTF8(); max=" + MAX_STRING_LEN);
    } else if (len > 0) {
        byte[] buf = new byte[len];
        in.readFully(buf, 0, len);
        return new String(buf, "UTF-8");
    } else if (len == 0) {
        return "";
    } else if (len == -1) {
        return null;
    } else {
        throw new IOException("Invalid length " + len + " in ByteUtil.readUTF8()");
    }
}
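The reader encodes three states in the length prefix: -1 for null, 0 for the empty string, and a positive byte count otherwise. A sketch of the writeUTF8 counterpart it pairs with, reconstructed from those conventions (the body is an assumption, not the library's actual code):

// Reconstructed counterpart (assumption): mirrors readUTF8's length conventions.
public static void writeUTF8(DataOutput out, String str) throws IOException {
    if (str == null) {
        out.writeInt(-1);                 // null is encoded as length -1
    } else if (str.isEmpty()) {
        out.writeInt(0);                  // empty string is just a zero length
    } else {
        byte[] buf = str.getBytes("UTF-8");
        out.writeInt(buf.length);         // the prefix counts encoded bytes, not characters
        out.write(buf);
    }
}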
From source file:org.apache.hadoop.hbase.security.access.AccessControlLists.java
/**
 * Reads a set of permissions as {@link org.apache.hadoop.io.Writable} instances
 * from the input stream.
 */
public static ListMultimap<String, TablePermission> readPermissions(byte[] data, Configuration conf)
        throws DeserializationException {
    if (ProtobufUtil.isPBMagicPrefix(data)) {
        int pblen = ProtobufUtil.lengthOfPBMagic();
        try {
            AccessControlProtos.UsersAndPermissions perms = AccessControlProtos.UsersAndPermissions.newBuilder()
                    .mergeFrom(data, pblen, data.length - pblen).build();
            return ProtobufUtil.toUserTablePermissions(perms);
        } catch (InvalidProtocolBufferException e) {
            throw new DeserializationException(e);
        }
    } else {
        ListMultimap<String, TablePermission> perms = ArrayListMultimap.create();
        try {
            DataInput in = new DataInputStream(new ByteArrayInputStream(data));
            int length = in.readInt();
            for (int i = 0; i < length; i++) {
                String user = Text.readString(in);
                List<TablePermission> userPerms = (List) HbaseObjectWritableFor96Migration.readObject(in, conf);
                perms.putAll(user, userPerms);
            }
        } catch (IOException e) {
            throw new DeserializationException(e);
        }
        return perms;
    }
}
From source file:com.willetinc.hadoop.mapreduce.dynamodb.AttributeValueIOUtils.java
public static AttributeValue read(Types type, DataInput in) throws IOException {
    AttributeValue value = new AttributeValue();
    switch (type) {
    case STRING:
        value.withS(Text.readString(in));
        break;
    case NUMBER:
        value.withN(Text.readString(in));
        break;
    case BINARY: {
        byte[] bytes = WritableUtils.readCompressedByteArray(in);
        ByteBuffer buf = ByteBuffer.wrap(bytes);
        value.withB(buf);
        break;
    }
    case STRING_SET:
    case NUMBER_SET:
    case BINARY_SET: {
        // handle sets: a count followed by that many scalar values of the element type
        int size = in.readInt();
        List<AttributeValue> values = new ArrayList<AttributeValue>(size);
        for (int i = 0; i < size; i++) {
            switch (type) {
            case STRING_SET:
                values.add(read(Types.STRING, in));
                break;
            case NUMBER_SET:
                values.add(read(Types.NUMBER, in));
                break;
            case BINARY_SET:
                values.add(read(Types.BINARY, in));
                break;
            default:
                throw new IOException("Nested sets of sets are not permitted");
            }
        }
        // copy the scalar payloads into the matching set field of the result
        switch (type) {
        case STRING_SET: {
            List<String> ss = new ArrayList<String>(size);
            for (AttributeValue v : values) {
                ss.add(v.getS());
            }
            value.withSS(ss);
            break;
        }
        case NUMBER_SET: {
            List<String> ns = new ArrayList<String>(size);
            for (AttributeValue v : values) {
                ns.add(v.getN());
            }
            value.withNS(ns);
            break;
        }
        case BINARY_SET: {
            List<ByteBuffer> bs = new ArrayList<ByteBuffer>(size);
            for (AttributeValue v : values) {
                bs.add(v.getB());
            }
            value.withBS(bs);
            break;
        }
        default:
            break;
        }
        break;
    }
    }
    return value;
}
From source file:org.apache.hadoop.hdfs.server.namenode.FSImageSerialization.java
public static CacheDirectiveInfo readCacheDirectiveInfo(DataInput in) throws IOException {
    CacheDirectiveInfo.Builder builder = new CacheDirectiveInfo.Builder();
    builder.setId(readLong(in));
    int flags = in.readInt();
    if ((flags & 0x1) != 0) {
        builder.setPath(new Path(readString(in)));
    }
    if ((flags & 0x2) != 0) {
        builder.setReplication(readShort(in));
    }
    if ((flags & 0x4) != 0) {
        builder.setPool(readString(in));
    }
    if ((flags & 0x8) != 0) {
        builder.setExpiration(CacheDirectiveInfo.Expiration.newAbsolute(readLong(in)));
    }
    if ((flags & ~0xF) != 0) {
        throw new IOException("unknown flags set in " + "ModifyCacheDirectiveInfoOp: " + flags);
    }
    return builder.build();
}
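Here readInt() pulls in a presence bitmask: each optional field owns one bit, and any unknown bit is rejected. A hedged sketch of how the write side could compose the word (the helper names mirror the read* calls above and are assumptions):

// Hypothetical writer: set a bit per present optional field, then emit the fields in bit order.
public static void writeCacheDirectiveInfo(DataOutput out, CacheDirectiveInfo info) throws IOException {
    writeLong(info.getId(), out);
    int flags = (info.getPath() != null ? 0x1 : 0)
            | (info.getReplication() != null ? 0x2 : 0)
            | (info.getPool() != null ? 0x4 : 0)
            | (info.getExpiration() != null ? 0x8 : 0);
    out.writeInt(flags);
    if (info.getPath() != null) {
        writeString(info.getPath().toString(), out);
    }
    if (info.getReplication() != null) {
        writeShort(info.getReplication(), out);
    }
    if (info.getPool() != null) {
        writeString(info.getPool(), out);
    }
    if (info.getExpiration() != null) {
        writeLong(info.getExpiration().getMillis(), out);
    }
}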
From source file:com.fiorano.openesb.application.DmiObject.java
/**
 * Utility method to read a DmiObject from an input stream.
 *
 * @param dmiType type of DMI object
 * @param in input stream
 * @param versionNo version
 * @return the deserialized DmiObject, or null if the stream's presence marker is zero
 * @throws IOException IOException
 */
public static DmiObject fromStream(int dmiType, DataInput in, int versionNo) throws IOException {
    try {
        // A non-zero int marks that a DmiObject follows; zero means null.
        if (in.readInt() != 0) {
            DmiObject dmi = DmiObject.getDatamodelObject(dmiType);
            dmi.fromStream(in, versionNo);
            return dmi;
        } else {
            return null;
        }
    } catch (FioranoException ex) {
        throw (IOException) new IOException().initCause(ex);
    }
}