Example usage for java.io DataInput readUTF

Introduction

On this page you can find usage examples for java.io DataInput readUTF.

Prototype

String readUTF() throws IOException;

Document

Reads in a string that has been encoded using a modified UTF-8 format.
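
DataInput.readUTF is the read-side counterpart of DataOutput.writeUTF. Both use Java's modified UTF-8 encoding, in which the string is preceded by a two-byte length prefix, so a single encoded string is limited to 65535 bytes. A minimal round-trip sketch (the file name is hypothetical):

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

public class ReadUTFDemo {
    public static void main(String[] args) throws IOException {
        // write a string with the matching DataOutput method
        try (DataOutputStream out = new DataOutputStream(new FileOutputStream("utf-demo.bin"))) {
            out.writeUTF("hello, modified UTF-8");
        }

        // read it back; readUTF first consumes the two-byte length prefix
        try (DataInputStream in = new DataInputStream(new FileInputStream("utf-demo.bin"))) {
            System.out.println(in.readUTF());
        }
    }
}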

Usage

From source file: org.apache.hadoop.mapred.SampleTaskStatus.java

public void readFields(DataInput in) throws IOException {

    this.sampleMapTaskId.readFields(in);
    sampleMapTracker = in.readUTF();
    readInputStartTime = in.readLong();
    readInputDoneTime = in.readLong();
    writeOutputStartTime = in.readLong();
    writeOutputDoneTime = in.readLong();
    networkSampleMapCopyDurationMilliSec = in.readLong();
    additionalSpillDurationMilliSec = in.readLong();
    additionalSpillSize = in.readLong();
}
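
Like every Hadoop Writable, this readFields must consume exactly what the matching write(DataOutput) produced, field by field and in the same order. The actual write method is not shown on this page; a hypothetical counterpart consistent with the reads above would be:

public void write(DataOutput out) throws IOException {
    // hypothetical write side, mirroring the field order of readFields above
    this.sampleMapTaskId.write(out);
    out.writeUTF(sampleMapTracker);
    out.writeLong(readInputStartTime);
    out.writeLong(readInputDoneTime);
    out.writeLong(writeOutputStartTime);
    out.writeLong(writeOutputDoneTime);
    out.writeLong(networkSampleMapCopyDurationMilliSec);
    out.writeLong(additionalSpillDurationMilliSec);
    out.writeLong(additionalSpillSize);
}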

From source file: org.apache.hama.bsp.BSPMessageBundle.java

@SuppressWarnings("unchecked")
@Override
public void readFields(DataInput in) throws IOException {
    int num = in.readInt();

    if (num > 0) {
        Class<M> clazz = null;
        try {
            clazz = (Class<M>) Class.forName(in.readUTF());
        } catch (ClassNotFoundException e) {
            // if the class cannot be resolved, clazz stays null and
            // ReflectionUtils.newInstance below fails
            LOG.error("Class was not found.", e);
        }

        for (int i = 0; i < num; i++) {
            M msg = ReflectionUtils.newInstance(clazz);
            msg.readFields(in);
            messages.add(msg);
        }
    }
}
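
The class name written with writeUTF lets the reader reconstruct the message type via Class.forName. The matching write method is not shown on this page; a hypothetical counterpart for the framing read above:

public void write(DataOutput out) throws IOException {
    // hypothetical write side: count, then class name, then each message
    out.writeInt(messages.size());
    if (messages.size() > 0) {
        out.writeUTF(messages.get(0).getClass().getName());
        for (M message : messages) {
            message.write(out);
        }
    }
}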

From source file: org.apache.horn.core.LayeredNeuralNetwork.java

@Override
public void readFields(DataInput input) throws IOException {
    super.readFields(input);

    this.finalLayerIdx = input.readInt();
    this.dropRate = input.readFloat();

    // read neuron classes
    int neuronClasses = input.readInt();
    this.neuronClassList = Lists.newArrayList();
    for (int i = 0; i < neuronClasses; ++i) {
        try {
            Class<? extends Neuron> clazz = (Class<? extends Neuron>) Class.forName(input.readUTF());
            neuronClassList.add(clazz);
        } catch (ClassNotFoundException e) {
            // class lookup failed: the error is only printed and this entry is skipped
            e.printStackTrace();
        }
    }

    // read squash functions
    int squashingFunctionSize = input.readInt();
    this.squashingFunctionList = Lists.newArrayList();
    for (int i = 0; i < squashingFunctionSize; ++i) {
        this.squashingFunctionList.add(FunctionFactory.createFloatFunction(WritableUtils.readString(input)));
    }

    // read weights and construct matrices of previous updates
    int numOfMatrices = input.readInt();
    this.weightMatrixList = Lists.newArrayList();
    this.prevWeightUpdatesList = Lists.newArrayList();
    for (int i = 0; i < numOfMatrices; ++i) {
        FloatMatrix matrix = FloatMatrixWritable.read(input);
        this.weightMatrixList.add(matrix);
        this.prevWeightUpdatesList.add(new DenseFloatMatrix(matrix.getRowCount(), matrix.getColumnCount()));
    }

}

From source file: org.apache.horn.core.RecurrentLayeredNeuralNetwork.java

@Override
public void readFields(DataInput input) throws IOException {
    super.readFields(input);

    this.finalLayerIdx = input.readInt();
    this.dropRate = input.readFloat();

    // read neuron classes
    int neuronClasses = input.readInt();
    this.neuronClassList = Lists.newArrayList();
    for (int i = 0; i < neuronClasses; ++i) {
        try {
            Class<? extends Neuron> clazz = (Class<? extends Neuron>) Class.forName(input.readUTF());
            neuronClassList.add(clazz);
        } catch (ClassNotFoundException e) {
            // class lookup failed: the error is only printed and this entry is skipped
            e.printStackTrace();
        }
    }

    // read squash functions
    int squashingFunctionSize = input.readInt();
    this.squashingFunctionList = Lists.newArrayList();
    for (int i = 0; i < squashingFunctionSize; ++i) {
        this.squashingFunctionList.add(FunctionFactory.createFloatFunction(WritableUtils.readString(input)));
    }

    this.recurrentStepSize = input.readInt();
    this.numOutCells = input.readInt();
    int recurrentLayerListSize = input.readInt();
    this.recurrentLayerList = Lists.newArrayList();
    for (int i = 0; i < recurrentLayerListSize; i++) {
        this.recurrentLayerList.add(input.readBoolean());
    }

    // read weights and construct matrices of previous updates
    int numOfMatrices = input.readInt();
    this.weightMatrixLists = Lists.newArrayListWithExpectedSize(this.recurrentStepSize);
    this.prevWeightUpdatesLists = Lists.newArrayList();

    for (int step = 0; step < this.recurrentStepSize; step++) {
        this.weightMatrixList = Lists.newArrayList();
        this.prevWeightUpdatesList = Lists.newArrayList();

        for (int j = 0; j < this.layerSizeList.size() - 2; j++) {
            FloatMatrix matrix = FloatMatrixWritable.read(input);
            this.weightMatrixList.add(matrix);
            this.prevWeightUpdatesList.add(new DenseFloatMatrix(matrix.getRowCount(), matrix.getColumnCount()));
        }
        // if the cell has output layer, read from input
        if (step >= this.recurrentStepSize - this.numOutCells) {
            FloatMatrix matrix = FloatMatrixWritable.read(input);
            this.weightMatrixList.add(matrix);
            this.prevWeightUpdatesList.add(new DenseFloatMatrix(matrix.getRowCount(), matrix.getColumnCount()));
        }
        this.weightMatrixLists.add(this.weightMatrixList);
        this.prevWeightUpdatesLists.add(this.prevWeightUpdatesList);
    }
}

From source file: org.apache.kylin.dict.lookup.SnapshotTable.java

void readData(DataInput in) throws IOException {
    int rowNum = in.readInt();
    if (rowNum > 0) {
        int n = in.readInt();
        rowIndices = new ArrayList<int[]>(rowNum);

        if (this.useDictionary) {
            this.dict = new TrieDictionary<String>();
            dict.readFields(in);

            for (int i = 0; i < rowNum; i++) {
                int[] row = new int[n];
                this.rowIndices.add(row);
                for (int j = 0; j < n; j++) {
                    row[j] = in.readInt();
                }
            }
        } else {
            List<String[]> rows = new ArrayList<String[]>(rowNum);
            TrieDictionaryBuilder<String> b = new TrieDictionaryBuilder<String>(new StringBytesConverter());

            for (int i = 0; i < rowNum; i++) {
                String[] row = new String[n];
                rows.add(row);
                for (int j = 0; j < n; j++) {
                    row[j] = in.readUTF();
                    // NULL_STR is tricky, but we don't want to break the current snapshots
                    if (row[j].equals(NULL_STR))
                        row[j] = null;

                    b.addValue(row[j]);
                }
            }
            this.dict = b.build(0);
            for (String[] row : rows) {
                int[] rowIndex = new int[n];
                for (int i = 0; i < n; i++) {
                    rowIndex[i] = dict.getIdFromValue(row[i]);
                }
                this.rowIndices.add(rowIndex);
            }
        }
    } else {
        rowIndices = new ArrayList<int[]>();
        dict = new TrieDictionary<String>();
    }
}

From source file: org.apache.mahout.text.LuceneStorageConfiguration.java

@Override
public void readFields(DataInput in) throws IOException {
    try {
        sequenceFilesOutputPath = new Path(in.readUTF());
        indexPaths = Lists.newArrayList();
        String[] indexPathNames = in.readUTF().split(SEPARATOR_PATHS);
        for (String indexPath : indexPathNames) {
            this.indexPaths.add(new Path(indexPath));
        }
        idField = in.readUTF();
        fields = Arrays.asList(in.readUTF().split(SEPARATOR_FIELDS));
        query = new QueryParser(LUCENE_46, "query", new StandardAnalyzer(LUCENE_46)).parse(in.readUTF());
        maxHits = in.readInt();
    } catch (ParseException e) {
        throw new RuntimeException("Could not deserialize " + this.getClass().getName(), e);
    }
}
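
Here a single readUTF call carries several paths joined by SEPARATOR_PATHS, so the writer must pack them into one string the same way. A hypothetical write side, assuming Guava's Joiner, the same separator constants, and that the query's string form can be re-parsed:

public void write(DataOutput out) throws IOException {
    // hypothetical write side: join multiple values into one UTF string each
    out.writeUTF(sequenceFilesOutputPath.toString());
    out.writeUTF(Joiner.on(SEPARATOR_PATHS).join(indexPaths));
    out.writeUTF(idField);
    out.writeUTF(Joiner.on(SEPARATOR_FIELDS).join(fields));
    out.writeUTF(query.toString());
    out.writeInt(maxHits);
}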

From source file: org.apache.sqoop.connector.idf.CSVIntermediateDataFormat.java

/**
 * {@inheritDoc}
 */
@Override
public void read(DataInput in) throws IOException {
    data = in.readUTF();
}
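
Since the entire CSV record travels as one UTF string, writeUTF's 65535-byte encoded-length limit applies to each record. A hypothetical matching write side is a one-liner:

public void write(DataOutput out) throws IOException {
    // hypothetical write side; each record must encode to at most 65535 bytes
    out.writeUTF(data);
}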

From source file: org.apache.sqoop.connector.idf.JSONIntermediateDataFormat.java

/**
 * {@inheritDoc}
 */
@Override
public void read(DataInput in) throws IOException {
    try {
        data = (JSONObject) new JSONParser().parse(in.readUTF());
    } catch (ParseException e) {
        throw new SqoopException(JSONIntermediateDataFormatError.JSON_INTERMEDIATE_DATA_FORMAT_0002, e);
    }
}

From source file: org.apache.sysml.runtime.matrix.data.FrameBlock.java

@Override
public void readFields(DataInput in) throws IOException {
    //read head (rows, cols)
    _numRows = in.readInt();
    int numCols = in.readInt();
    boolean isDefaultMeta = in.readBoolean();
    //allocate schema/meta data arrays
    _schema = (_schema != null && _schema.length == numCols) ? _schema : new ValueType[numCols];
    _colnames = (_colnames != null && _colnames.length == numCols) ? _colnames : new String[numCols];
    _colmeta = (_colmeta != null && _colmeta.length == numCols) ? _colmeta : new ColumnMetadata[numCols];
    _coldata = (_coldata != null && _coldata.length == numCols) ? _coldata : new Array[numCols];
    //read columns (value type, meta, data)
    for (int j = 0; j < numCols; j++) {
        ValueType vt = ValueType.values()[in.readByte()];
        String name = isDefaultMeta ? createColName(j) : in.readUTF();
        long ndistinct = isDefaultMeta ? 0 : in.readLong();
        String mvvalue = isDefaultMeta ? null : in.readUTF();
        Array arr = null;
        switch (vt) {
        case STRING:
            arr = new StringArray(new String[_numRows]);
            break;
        case BOOLEAN:
            arr = new BooleanArray(new boolean[_numRows]);
            break;
        case INT:
            arr = new LongArray(new long[_numRows]);
            break;
        case DOUBLE:
            arr = new DoubleArray(new double[_numRows]);
            break;
        default:
            throw new IOException("Unsupported value type: " + vt);
        }
        arr.readFields(in);
        _schema[j] = vt;
        _colnames[j] = name;
        _colmeta[j] = new ColumnMetadata(ndistinct, (mvvalue == null || mvvalue.isEmpty()) ? null : mvvalue);
        _coldata[j] = arr;
    }
}

From source file: org.hyperic.hq.agent.AgentRemoteValue.java

public static AgentRemoteValue fromStream(DataInput is) throws IOException {
    Map<String, String> chunkedValues = new LinkedHashMap<String, String>();
    AgentRemoteValue res = new AgentRemoteValue();
    String key, val;

    key = is.readUTF();
    while (key.length() != 0) {
        val = is.readUTF();
        if (Pattern.matches("^" + CHUNK_PREFIX + "[.].*[.](\\d)", key)) {
            chunkedValues.put(key, val);
        } else {
            res.setValue(key, val);
        }
        key = is.readUTF();
    }

    for (Entry<String, String> entry : getUnchunkedValues(chunkedValues).entrySet()) {
        res.setValue(entry.getKey(), entry.getValue());
    }

    return res;
}
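
Here readUTF is called in a loop that stops at the first empty key, so the writer has to terminate the stream with an empty UTF string. A hypothetical write side for that framing, taking the values as a plain Map:

public static void toStream(Map<String, String> values, DataOutput os) throws IOException {
    // hypothetical write side: key/value pairs terminated by an empty key
    for (Map.Entry<String, String> entry : values.entrySet()) {
        os.writeUTF(entry.getKey());
        os.writeUTF(entry.getValue());
    }
    os.writeUTF(""); // sentinel: the readUTF loop above stops on this empty key
}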