List of usage examples for java.io.DataInput#readUTF()
String readUTF() throws IOException;
From source file: parquet.hadoop.ParquetInputSplit.java
private BlockMetaData readBlock(DataInput in) throws IOException { final BlockMetaData block = new BlockMetaData(); int size = in.readInt(); for (int i = 0; i < size; i++) { block.addColumn(readColumn(in)); }// www. j a v a 2 s . co m block.setRowCount(in.readLong()); block.setTotalByteSize(in.readLong()); if (!in.readBoolean()) { block.setPath(in.readUTF().intern()); } return block; }
From source file: parquet.hadoop.ParquetInputSplit.java
private ColumnChunkMetaData readColumn(DataInput in) throws IOException { CompressionCodecName codec = CompressionCodecName.values()[in.readInt()]; String[] columnPath = new String[in.readInt()]; for (int i = 0; i < columnPath.length; i++) { columnPath[i] = in.readUTF().intern(); }// w ww.j a v a2s . co m PrimitiveTypeName type = PrimitiveTypeName.values()[in.readInt()]; int encodingsSize = in.readInt(); Set<Encoding> encodings = new HashSet<Encoding>(encodingsSize); for (int i = 0; i < encodingsSize; i++) { encodings.add(Encoding.values()[in.readInt()]); } ColumnChunkMetaData column = ColumnChunkMetaData.get(ColumnPath.get(columnPath), type, codec, encodings, in.readLong(), in.readLong(), in.readLong(), in.readLong(), in.readLong()); return column; }
From source file: parquet.hadoop.ParquetInputSplit.java
private Map<String, String> readKeyValues(DataInput in) throws IOException { int size = in.readInt(); Map<String, String> map = new HashMap<String, String>(size); for (int i = 0; i < size; i++) { String key = in.readUTF().intern(); String value = in.readUTF().intern(); map.put(key, value);//from ww w.j av a2 s . c o m } return map; }
From source file: ph.fingra.hadoop.mapred.parts.component.domain.ComponentAppversionKey.java
// Hadoop Writable deserialization: restores appkey, componentkey, appversion,
// token, and session from the stream, in that exact order. The read order is
// the wire format — NOTE(review): it must match the field order in the
// corresponding write(DataOutput), which is not visible here; confirm there.
@Override public void readFields(DataInput in) throws IOException { this.appkey = in.readUTF(); this.componentkey = in.readUTF(); this.appversion = in.readUTF(); this.token = in.readUTF(); this.session = in.readUTF(); }
From source file: ph.fingra.hadoop.mapred.parts.component.domain.ComponentCountryKey.java
// Hadoop Writable deserialization: restores appkey, componentkey, country,
// token, and session from the stream, in that exact order. The read order is
// the wire format — NOTE(review): it must match the field order in the
// corresponding write(DataOutput), which is not visible here; confirm there.
@Override public void readFields(DataInput in) throws IOException { this.appkey = in.readUTF(); this.componentkey = in.readUTF(); this.country = in.readUTF(); this.token = in.readUTF(); this.session = in.readUTF(); }
From source file: ph.fingra.hadoop.mapred.parts.component.domain.ComponentDeviceKey.java
// Hadoop Writable deserialization: restores appkey, componentkey, device,
// token, and session from the stream, in that exact order. The read order is
// the wire format — NOTE(review): it must match the field order in the
// corresponding write(DataOutput), which is not visible here; confirm there.
@Override public void readFields(DataInput in) throws IOException { this.appkey = in.readUTF(); this.componentkey = in.readUTF(); this.device = in.readUTF(); this.token = in.readUTF(); this.session = in.readUTF(); }
From source file: ph.fingra.hadoop.mapred.parts.component.domain.ComponentHourSessionKey.java
// Hadoop Writable deserialization: restores appkey, componentkey, session,
// and localtime from the stream, in that exact order (this key has four
// fields, unlike the five-field sibling keys). The read order is the wire
// format — NOTE(review): it must match the field order in the corresponding
// write(DataOutput), which is not visible here; confirm there.
@Override public void readFields(DataInput in) throws IOException { this.appkey = in.readUTF(); this.componentkey = in.readUTF(); this.session = in.readUTF(); this.localtime = in.readUTF(); }
From source file: ph.fingra.hadoop.mapred.parts.component.domain.ComponentLanguageKey.java
// Hadoop Writable deserialization: restores appkey, componentkey, language,
// token, and session from the stream, in that exact order. The read order is
// the wire format — NOTE(review): it must match the field order in the
// corresponding write(DataOutput), which is not visible here; confirm there.
@Override public void readFields(DataInput in) throws IOException { this.appkey = in.readUTF(); this.componentkey = in.readUTF(); this.language = in.readUTF(); this.token = in.readUTF(); this.session = in.readUTF(); }
From source file: ph.fingra.hadoop.mapred.parts.component.domain.ComponentOsversionKey.java
// Hadoop Writable deserialization: restores appkey, componentkey, osversion,
// token, and session from the stream, in that exact order. The read order is
// the wire format — NOTE(review): it must match the field order in the
// corresponding write(DataOutput), which is not visible here; confirm there.
@Override public void readFields(DataInput in) throws IOException { this.appkey = in.readUTF(); this.componentkey = in.readUTF(); this.osversion = in.readUTF(); this.token = in.readUTF(); this.session = in.readUTF(); }
From source file: ph.fingra.hadoop.mapred.parts.component.domain.ComponentResolutionKey.java
// Hadoop Writable deserialization: restores appkey, componentkey, resolution,
// token, and session from the stream, in that exact order. The read order is
// the wire format — NOTE(review): it must match the field order in the
// corresponding write(DataOutput), which is not visible here; confirm there.
@Override public void readFields(DataInput in) throws IOException { this.appkey = in.readUTF(); this.componentkey = in.readUTF(); this.resolution = in.readUTF(); this.token = in.readUTF(); this.session = in.readUTF(); }