List of usage examples for org.apache.hadoop.io Text readFields
@Override public void readFields(DataInput in) throws IOException
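Text implements Hadoop's Writable contract: readFields(DataInput) overwrites the instance's contents with whatever a prior write(DataOutput) call produced. A minimal, self-contained round-trip sketch (class name and string are illustrative, not from any of the sources below):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.Text;

public class TextReadFieldsDemo {
    public static void main(String[] args) throws IOException {
        // serialize a Text with write(DataOutput)
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        new Text("hello").write(out);
        out.close();

        // deserialize into a fresh instance with readFields(DataInput)
        Text restored = new Text();
        restored.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(restored); // prints "hello"
    }
}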
From source file:com.marklogic.mapreduce.MarkLogicInputSplit.java
License:Apache License
@Override
public void readFields(DataInput in) throws IOException {
    start = in.readLong();
    length = in.readLong();
    Text forestIdText = new Text();
    forestIdText.readFields(in);
    forestId = new BigInteger(forestIdText.getBytes());
    hostName = new String[1];
    hostName[0] = Text.readString(in);
    isLastSplit = in.readBoolean();
}
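One caveat worth knowing about this pattern: Text.getBytes() returns the backing array, which may be longer than the valid data (only the first getLength() bytes are meaningful), so passing it straight to BigInteger can pick up stale trailing bytes. A defensive variant (an illustrative sketch, not the MarkLogic source):

// copy only the valid portion of the Text buffer before building the BigInteger
byte[] valid = java.util.Arrays.copyOf(forestIdText.getBytes(), forestIdText.getLength());
forestId = new BigInteger(valid);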
From source file:com.yolodata.tbana.hadoop.mapred.util.ArrayListTextWritable.java
License:Open Source License
@Override
public void readFields(DataInput datainput) throws IOException {
    this.clear();
    int count = datainput.readInt();
    for (int i = 0; i < count; i++) {
        try {
            Text obj = Text.class.newInstance();
            obj.readFields(datainput);
            this.add(obj);
        } catch (InstantiationException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        }
    }
}
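Class.newInstance() is deprecated in newer JDKs and is what forces the InstantiationException/IllegalAccessException handling above; since the element type is statically known here, a plain constructor call avoids the reflection entirely (an illustrative simplification of the same method):

@Override
public void readFields(DataInput datainput) throws IOException {
    this.clear();
    int count = datainput.readInt();
    for (int i = 0; i < count; i++) {
        Text obj = new Text(); // no reflection needed for a known class
        obj.readFields(datainput);
        this.add(obj);
    }
}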
From source file:edu.ub.ahstfg.io.index.DocumentDescriptor.java
License:Open Source License
@Override
public void readFields(DataInput input) throws IOException {
    Text t = new Text();
    t.readFields(input);
    url = t.toString();
    ArrayWritable buffer = new ArrayWritable(IntWritable.class);
    buffer.readFields(input);
    termFreq = WritableConverter.arrayWritable2ShortArray(buffer);
    buffer = new ArrayWritable(IntWritable.class);
    buffer.readFields(input);
    keyFreq = WritableConverter.arrayWritable2ShortArray(buffer);
}
From source file:gobblin.compat.TextSerializerTest.java
License:Apache License
@Test
public void testSerialize() throws IOException {
    // Use our serializer, verify Hadoop deserializer can read it back
    for (String textToSerialize : textsToSerialize) {
        ByteArrayOutputStream bOs = new ByteArrayOutputStream();
        DataOutputStream dataOutputStream = new DataOutputStream(bOs);

        TextSerializer.writeStringAsText(dataOutputStream, textToSerialize);
        dataOutputStream.close();

        ByteArrayInputStream bIn = new ByteArrayInputStream(bOs.toByteArray());
        DataInputStream dataInputStream = new DataInputStream(bIn);
        Text hadoopText = new Text();
        hadoopText.readFields(dataInputStream);
        Assert.assertEquals(hadoopText.toString(), textToSerialize);
    }
}
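The wire format that readFields consumes is a varint byte length followed by the UTF-8 bytes, which is why a hand-rolled serializer like the one exercised above can interoperate with Hadoop's Text. A sketch of producing that format directly with Hadoop's WritableUtils (illustrative fragment, reusing dataOutputStream from the test; not Gobblin's TextSerializer):

// write the Text wire format by hand: varint length, then UTF-8 bytes
byte[] utf8 = textToSerialize.getBytes(java.nio.charset.StandardCharsets.UTF_8);
WritableUtils.writeVInt(dataOutputStream, utf8.length);
dataOutputStream.write(utf8);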
From source file:gobblin.metastore.MysqlStateStore.java
License:Apache License
@Override
public T get(String storeName, String tableName, String stateId) throws IOException {
    try (Connection connection = dataSource.getConnection();
            PreparedStatement queryStatement = connection.prepareStatement(SELECT_JOB_STATE_SQL)) {
        int index = 0;
        queryStatement.setString(++index, storeName);
        queryStatement.setString(++index, tableName);

        try (ResultSet rs = queryStatement.executeQuery()) {
            if (rs.next()) {
                Blob blob = rs.getBlob(1);
                Text key = new Text();

                try (InputStream is = StreamUtils.isCompressed(blob.getBytes(1, 2))
                        ? new GZIPInputStream(blob.getBinaryStream())
                        : blob.getBinaryStream();
                        DataInputStream dis = new DataInputStream(is)) {
                    // keep deserializing while we have data
                    while (dis.available() > 0) {
                        T state = this.stateClass.newInstance();
                        key.readFields(dis);
                        state.readFields(dis);

                        if (key.toString().equals(stateId)) {
                            return state;
                        }
                    }
                } catch (EOFException e) {
                    // no more data. GZIPInputStream.available() doesn't return 0 until after EOF.
                }
            }
        }
    } catch (RuntimeException e) {
        throw e;
    } catch (Exception e) {
        throw new IOException(
                "failure retrieving state from storeName " + storeName + " tableName " + tableName, e);
    }

    return null;
}
From source file:gobblin.metastore.ZkStateStore.java
License:Apache License
/**
 * Deserialize data into a list of {@link State}s.
 * @param data byte array
 * @param states output list of states
 * @param stateId optional key filter. Set to null for no filtering.
 * @throws IOException
 */
private void deserialize(byte[] data, List<T> states, String stateId) throws IOException {
    if (data != null) {
        Text key = new Text();

        try (ByteArrayInputStream bais = new ByteArrayInputStream(data);
                InputStream is = StreamUtils.isCompressed(data) ? new GZIPInputStream(bais) : bais;
                DataInputStream dis = new DataInputStream(is)) {
            // keep deserializing while we have data
            while (dis.available() > 0) {
                T state = this.stateClass.newInstance();
                key.readFields(dis);
                state.readFields(dis);
                states.add(state);

                if (stateId != null && key.toString().equals(stateId)) {
                    return;
                }
            }
        } catch (EOFException e) {
            // no more data. GZIPInputStream.available() doesn't return 0 until after EOF.
        } catch (RuntimeException e) {
            throw e;
        } catch (Exception e) {
            throw new IOException("failure deserializing state from ZkStateStore", e);
        }
    }
}
From source file:gobblin.runtime.JobState.java
License:Apache License
@Override
public void readFields(DataInput in) throws IOException {
    Text text = new Text();
    text.readFields(in);
    this.jobName = text.toString().intern();
    text.readFields(in);
    this.jobId = text.toString().intern();
    this.setId(this.jobId);
    this.startTime = in.readLong();
    this.endTime = in.readLong();
    this.duration = in.readLong();
    text.readFields(in);
    this.state = RunningState.valueOf(text.toString());
    this.taskCount = in.readInt();

    int numTaskStates = in.readInt();
    getTaskStateWithCommonAndSpecWuProps(numTaskStates, in);

    super.readFields(in);
}
From source file:gobblin.runtime.TaskState.java
License:Apache License
@Override
public void readFields(DataInput in) throws IOException {
    Text text = new Text();
    text.readFields(in);
    this.jobId = text.toString().intern();
    text.readFields(in);
    this.taskId = text.toString().intern();
    this.setId(this.taskId);
    this.startTime = in.readLong();
    this.endTime = in.readLong();
    this.duration = in.readLong();
    super.readFields(in);
}
From source file:it.uniroma1.bdc.tesi.piccioli.giraphstandalone.ksimplecycle.TextAndHashes.java
@Override
public void readFields(DataInput in) throws IOException {
    int size;

    value.readFields(in);

    size = in.readInt();
    for (int i = 0; i < size; i++) {
        this.generatedHash.add(in.readInt());
    }

    size = in.readInt(); // read the number of keys to insert into the map
    for (int i = 0; i < size; i++) {
        Text key = new Text();
        key.readFields(in); // read the key
        this.seenHash.put(key, new HashSet()); // insert the key into the map

        int sizeSet = in.readInt(); // read the number of elements in the set referenced by this key
        for (int j = 0; j < sizeSet; j++) {
            this.seenHash.get(key).add(in.readInt());
        }
    }
}
From source file:it.uniroma1.bdc.tesi.piccioli.giraphstandalone.message.CustomMessageWithAggregatedPath.java
@Override
public void readFields(DataInput in) throws IOException {
    int size = in.readInt();
    visitedVertex = new ArrayList<HashSet<Text>>(size);
    for (int i = 0; i < size; i++) {
        int sizeItem = in.readInt();
        // add() rather than set(): the list is created empty (size is only the
        // initial capacity), so set(i, ...) would throw IndexOutOfBoundsException
        visitedVertex.add(new HashSet<Text>());
        for (int j = 0; j < sizeItem; j++) {
            Text toAdd = new Text();
            toAdd.readFields(in);
            visitedVertex.get(i).add(toAdd);
        }
    }
}