Example usage for java.io DataOutput writeInt

Introduction

On this page you can find usage examples for java.io.DataOutput.writeInt, collected from open-source projects.

Prototype

void writeInt(int v) throws IOException;

Document

Writes an int value, which is comprised of four bytes, to the output stream.
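
Before the project snippets below, here is a minimal, self-contained round-trip sketch (not taken from any of the projects on this page): writeInt emits exactly four bytes, high byte first, and the matching DataInput.readInt reads the value back.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteIntRoundTrip {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // DataOutputStream is the most common DataOutput implementation
        DataOutputStream out = new DataOutputStream(baos);
        out.writeInt(42);

        byte[] bytes = baos.toByteArray();
        System.out.println(bytes.length); // 4: writeInt always produces four bytes

        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes));
        System.out.println(in.readInt()); // 42
    }
}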

Usage

From source file: org.apache.accumulo.core.client.mapreduce.RangeInputSplit.java

@Override
public void write(DataOutput out) throws IOException {
    range.write(out);
    out.writeUTF(tableName);
    out.writeUTF(tableId);
    out.writeInt(locations.length);
    for (int i = 0; i < locations.length; ++i)
        out.writeUTF(locations[i]);

    out.writeBoolean(null != isolatedScan);
    if (null != isolatedScan) {
        out.writeBoolean(isolatedScan);
    }

    out.writeBoolean(null != offline);
    if (null != offline) {
        out.writeBoolean(offline);
    }

    out.writeBoolean(null != localIterators);
    if (null != localIterators) {
        out.writeBoolean(localIterators);
    }

    out.writeBoolean(null != mockInstance);
    if (null != mockInstance) {
        out.writeBoolean(mockInstance);
    }

    out.writeBoolean(null != fetchedColumns);
    if (null != fetchedColumns) {
        String[] cols = InputConfigurator.serializeColumns(fetchedColumns);
        out.writeInt(cols.length);
        for (String col : cols) {
            out.writeUTF(col);
        }
    }

    out.writeBoolean(null != auths);
    if (null != auths) {
        out.writeUTF(auths.serialize());
    }

    out.writeBoolean(null != principal);
    if (null != principal) {
        out.writeUTF(principal);
    }

    out.writeBoolean(null != tokenSource);
    if (null != tokenSource) {
        out.writeInt(tokenSource.ordinal());

        if (null != token && null != tokenFile) {
            throw new IOException(
                    "Cannot use both inline AuthenticationToken and file-based AuthenticationToken");
        } else if (null != token) {
            out.writeUTF(token.getClass().getCanonicalName());
            out.writeUTF(Base64.encodeBase64String(AuthenticationTokenSerializer.serialize(token)));
        } else {
            out.writeUTF(tokenFile);
        }
    }

    out.writeBoolean(null != instanceName);
    if (null != instanceName) {
        out.writeUTF(instanceName);
    }

    out.writeBoolean(null != zooKeepers);
    if (null != zooKeepers) {
        out.writeUTF(zooKeepers);
    }

    out.writeBoolean(null != level);
    if (null != level) {
        out.writeInt(level.toInt());
    }
}
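
Every nullable field above is guarded by a boolean presence flag written immediately before it. A deserializer must consume the flags in exactly the same order; a minimal sketch for one such field inside the matching readFields (illustrative only, not the actual Accumulo code):

    // inside readFields(DataInput in), after range, tableName, tableId and locations:
    if (in.readBoolean()) {            // presence flag written by write()
        isolatedScan = in.readBoolean();
    } else {
        isolatedScan = null;
    }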

From source file: ml.shifu.shifu.core.dtrain.dataset.PersistBasicFloatNetwork.java

public void saveNetwork(DataOutput out, final BasicFloatNetwork network) throws IOException {
    final FlatNetwork flat = network.getStructure().getFlat();
    // write general properties
    Map<String, String> properties = network.getProperties();
    if (properties == null) {
        out.writeInt(0);
    } else {
        out.writeInt(properties.size());
        for (Entry<String, String> entry : properties.entrySet()) {
            ml.shifu.shifu.core.dtrain.StringUtils.writeString(out, entry.getKey());
            ml.shifu.shifu.core.dtrain.StringUtils.writeString(out, entry.getValue());
        }
    }

    // write fields values in BasicFloatNetwork
    out.writeInt(flat.getBeginTraining());
    out.writeDouble(flat.getConnectionLimit());

    writeIntArray(out, flat.getContextTargetOffset());
    writeIntArray(out, flat.getContextTargetSize());

    out.writeInt(flat.getEndTraining());
    out.writeBoolean(flat.getHasContext());
    out.writeInt(flat.getInputCount());

    writeIntArray(out, flat.getLayerCounts());
    writeIntArray(out, flat.getLayerFeedCounts());
    writeIntArray(out, flat.getLayerContextCount());
    writeIntArray(out, flat.getLayerIndex());
    writeDoubleArray(out, flat.getLayerOutput());
    out.writeInt(flat.getOutputCount());
    writeIntArray(out, flat.getWeightIndex());
    writeDoubleArray(out, flat.getWeights());
    writeDoubleArray(out, flat.getBiasActivation());

    // write activation list
    out.writeInt(flat.getActivationFunctions().length);
    for (final ActivationFunction af : flat.getActivationFunctions()) {
        ml.shifu.shifu.core.dtrain.StringUtils.writeString(out, af.getClass().getSimpleName());
        writeDoubleArray(out, af.getParams());
    }
    // write sub sets
    Set<Integer> featureList = network.getFeatureSet();
    if (featureList == null || featureList.size() == 0) {
        out.writeInt(0);
    } else {
        out.writeInt(featureList.size());
        for (Integer integer : featureList) {
            out.writeInt(integer);
        }
    }
}
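
The writeIntArray and writeDoubleArray helpers are not shown in this snippet. A plausible implementation, assuming the usual length-prefix convention (a sketch, not necessarily the actual Shifu helper):

private void writeIntArray(DataOutput out, int[] array) throws IOException {
    // length prefix first, so readers can allocate before reading elements
    out.writeInt(array.length);
    for (int value : array) {
        out.writeInt(value);
    }
}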

From source file: eu.stratosphere.nephele.jobgraph.AbstractJobVertex.java

@Override
public void write(final DataOutput out) throws IOException {

    // Number of subtasks
    out.writeInt(this.numberOfSubtasks);

    // Number of execution retries
    out.writeInt(this.numberOfExecutionRetries);

    // Vertex to share instance with
    if (this.vertexToShareInstancesWith != null) {
        out.writeBoolean(true);
        this.vertexToShareInstancesWith.getID().write(out);
    } else {
        out.writeBoolean(false);
    }

    // Write the configuration
    this.configuration.write(out);

    // We ignore the backward edges and connect them when we reconstruct the graph on the remote side, only write
    // number of forward edges
    out.writeInt(this.forwardEdges.size());

    // Now output the IDs of the vertices this vertex is connected to
    for (int i = 0; i < this.forwardEdges.size(); i++) {
        final JobEdge edge = this.forwardEdges.get(i);
        if (edge == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            edge.getConnectedVertex().getID().write(out);
            EnumUtils.writeEnum(out, edge.getChannelType());
            EnumUtils.writeEnum(out, edge.getDistributionPattern());
            out.writeInt(edge.getIndexOfInputGate());
        }
    }

    // Write the invokable class
    if (this.invokableClass == null) {
        out.writeBoolean(false);
        return;
    }

    out.writeBoolean(true);

    // Write out the name of the class
    StringRecord.writeString(out, this.invokableClass.getName());
}

From source file: com.netflix.aegisthus.input.AegSplit.java

@Override
public void write(DataOutput out) throws IOException {
    out.writeLong(end);
    WritableUtils.writeStringArray(out, hosts);
    WritableUtils.writeString(out, path.toUri().toString());
    out.writeBoolean(compressed);
    if (compressed) {
        WritableUtils.writeString(out, compressedPath.toUri().toString());
    }
    out.writeLong(start);
    WritableUtils.writeEnum(out, type);
    if (convertors != null) {
        String[] parts = new String[2];
        out.writeInt(convertors.size());
        for (Map.Entry<String, AbstractType> e : convertors.entrySet()) {
            parts[0] = e.getKey();
            parts[1] = e.getValue().toString();
            WritableUtils.writeStringArray(out, parts);
        }
    } else {
        out.writeInt(0);
    }
}
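
The convertors map is serialized as a count followed by one two-element string array per entry. A sketch of the matching read side (hypothetical, not the actual Aegisthus code):

    int size = in.readInt();                   // 0 when convertors was null
    for (int i = 0; i < size; i++) {
        String[] parts = WritableUtils.readStringArray(in);
        String key = parts[0];                 // map key
        String typeString = parts[1];          // serialized AbstractType
        // rebuild the AbstractType from typeString as needed
    }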

From source file: libra.common.hadoop.io.datatypes.CompressedIntArrayWritable.java

@Override
public void write(DataOutput out) throws IOException {
    int count = this.intArray.length;
    byte flag = makeFlag(count, this.intArray);
    out.writeByte(flag);

    if ((flag & 0x0f) == 0x00) {
        out.writeByte(count);
    } else if ((flag & 0x0f) == 0x01) {
        out.writeShort(count);
    } else if ((flag & 0x0f) == 0x02) {
        out.writeInt(count);
    } else {
        throw new IOException("unhandled flag");
    }

    if ((flag & 0xf0) == 0x00) {
        for (int i = 0; i < count; i++) {
            out.writeByte((byte) this.intArray[i]);
        }
    } else if ((flag & 0xf0) == 0x10) {
        for (int i = 0; i < count; i++) {
            out.writeShort((short) this.intArray[i]);
        }
    } else if ((flag & 0xf0) == 0x20) {
        for (int i = 0; i < count; i++) {
            out.writeInt((int) this.intArray[i]);
        }
    } else {
        throw new IOException("unhandled flag");
    }
}
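
The flag byte packs two nibbles: the low nibble selects the width of the count, the high nibble the width of each element. A sketch of decoding the count (hypothetical; the unsigned reads assume makeFlag sizes the count by its unsigned range):

    byte flag = in.readByte();
    int count;
    if ((flag & 0x0f) == 0x00) {
        count = in.readUnsignedByte();    // count was written with writeByte
    } else if ((flag & 0x0f) == 0x01) {
        count = in.readUnsignedShort();   // count was written with writeShort
    } else if ((flag & 0x0f) == 0x02) {
        count = in.readInt();             // count was written with writeInt
    } else {
        throw new IOException("unhandled flag");
    }
    // the high nibble (flag & 0xf0) selects the element reads analogously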

From source file: org.apache.hadoop.hbase.io.hfile.FixedFileTrailer.java

/**
 * Write the trailer to a data stream. We support writing version 1 for
 * testing and for determining version 1 trailer size. It is also easy to see
 * what fields changed in version 2.
 *
 * @param outputStream
 * @throws IOException
 */
void serialize(DataOutputStream outputStream) throws IOException {
    HFile.checkFormatVersion(majorVersion);

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutput baosDos = new DataOutputStream(baos);

    BlockType.TRAILER.write(baosDos);
    baosDos.writeLong(fileInfoOffset);
    baosDos.writeLong(loadOnOpenDataOffset);
    baosDos.writeInt(dataIndexCount);

    if (majorVersion == 1) {
        // This used to be metaIndexOffset, but it was not used in version 1.
        baosDos.writeLong(0);
    } else {
        baosDos.writeLong(uncompressedDataIndexSize);
    }

    baosDos.writeInt(metaIndexCount);
    baosDos.writeLong(totalUncompressedBytes);
    if (majorVersion == 1) {
        baosDos.writeInt((int) Math.min(Integer.MAX_VALUE, entryCount));
    } else {
        // This field is long from version 2 onwards.
        baosDos.writeLong(entryCount);
    }
    baosDos.writeInt(compressionCodec.ordinal());

    if (majorVersion > 1) {
        baosDos.writeInt(numDataIndexLevels);
        baosDos.writeLong(firstDataBlockOffset);
        baosDos.writeLong(lastDataBlockOffset);
        Bytes.writeStringFixedSize(baosDos, comparatorClassName, MAX_COMPARATOR_NAME_LENGTH);
    }

    // serialize the major and minor versions
    baosDos.writeInt(materializeVersion(majorVersion, minorVersion));

    outputStream.write(baos.toByteArray());
}
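
Note the design choice here: the trailer is first assembled in an in-memory ByteArrayOutputStream and then written to the real stream in a single write call, which keeps the trailer contiguous and makes its total size known before anything is emitted.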

From source file: org.apache.marmotta.kiwi.io.KiWiIO.java

/**
 * Write a string to the data output. In case the string length exceeds LITERAL_COMPRESS_LENGTH, uses a
 * deflate-compressed format, otherwise writes the plain bytes.
 *
 * @param out      output destination to write to
 * @param content  string to write
 * @throws IOException
 */
private static void writeContent(DataOutput out, String content) throws IOException {
    if (content.length() > LITERAL_COMPRESS_LENGTH) {
        // temporary buffer of the size of bytes in the content string (assuming that the compressed data will fit into it)
        byte[] data = content.getBytes("UTF-8");
        byte[] buffer = new byte[data.length];

        Deflater compressor = new Deflater(Deflater.BEST_COMPRESSION, true);
        compressor.setInput(data);
        compressor.finish();

        int length = compressor.deflate(buffer);

        // only use compressed version if it is smaller than the number of bytes used by the string
        if (length < buffer.length) {
            log.debug("compressed string with {} bytes; compression ratio {}", data.length,
                    (double) length / data.length);

            out.writeByte(MODE_COMPRESSED);
            out.writeInt(data.length);
            out.writeInt(length);
            out.write(buffer, 0, length);
        } else {
            log.warn("compressed length exceeds string buffer: {} > {}", length, buffer.length);

            out.writeByte(MODE_DEFAULT);
            DataIO.writeString(out, content);
        }

        compressor.end();
    } else {
        out.writeByte(MODE_DEFAULT);
        DataIO.writeString(out, content);
    }
}
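
A matching reader has to undo the same framing: mode byte, uncompressed length, compressed length, payload. A sketch (hypothetical; the real KiWiIO reader may differ) that reuses the MODE_* constants and DataIO helper from the snippet above and assumes java.util.zip.Inflater, with nowrap=true to match the Deflater:

private static String readContent(DataInput in) throws IOException {
    byte mode = in.readByte();
    if (mode == MODE_COMPRESSED) {
        int uncompressedLength = in.readInt();
        int compressedLength = in.readInt();
        byte[] compressed = new byte[compressedLength];
        in.readFully(compressed);

        Inflater decompressor = new Inflater(true); // nowrap, matching the Deflater
        decompressor.setInput(compressed);
        byte[] data = new byte[uncompressedLength];
        try {
            decompressor.inflate(data);
        } catch (DataFormatException e) {
            throw new IOException("invalid compressed content", e);
        } finally {
            decompressor.end();
        }
        return new String(data, "UTF-8");
    } else {
        return DataIO.readString(in); // assumed counterpart of DataIO.writeString
    }
}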

From source file: org.apache.horn.core.LayeredNeuralNetwork.java

@Override
public void write(DataOutput output) throws IOException {
    super.write(output);
    output.writeInt(finalLayerIdx);
    output.writeFloat(dropRate);

    // write neuron classes
    output.writeInt(this.neuronClassList.size());
    for (Class<? extends Neuron> clazz : this.neuronClassList) {
        output.writeUTF(clazz.getName());
    }

    // write squashing functions
    output.writeInt(this.squashingFunctionList.size());
    for (FloatFunction aSquashingFunctionList : this.squashingFunctionList) {
        WritableUtils.writeString(output, aSquashingFunctionList.getFunctionName());
    }

    // write weight matrices
    output.writeInt(this.weightMatrixList.size());
    for (FloatMatrix aWeightMatrixList : this.weightMatrixList) {
        FloatMatrixWritable.write(aWeightMatrixList, output);
    }

    // DO NOT WRITE WEIGHT UPDATE
}

From source file: FormatStorage1.IRecord.java

@Override
public void write(DataOutput out) throws IOException {
    out.writeBoolean(this.oldformat);
    out.writeInt(this.fieldnum);

    int i = 0;
    while (i++ < fieldnum) {
        fieldTypes.get(idxs[i - 1]).write(out);
    }
    this.persistent(out);
}

From source file: org.apache.carbondata.core.metadata.blocklet.BlockletInfo.java

@Override
public void write(DataOutput output) throws IOException {
    output.writeLong(dimensionOffset);
    output.writeLong(measureOffsets);
    int dsize = dimensionChunkOffsets != null ? dimensionChunkOffsets.size() : 0;
    output.writeShort(dsize);
    for (int i = 0; i < dsize; i++) {
        output.writeLong(dimensionChunkOffsets.get(i));
    }
    for (int i = 0; i < dsize; i++) {
        output.writeInt(dimensionChunksLength.get(i));
    }
    int mSize = measureChunkOffsets != null ? measureChunkOffsets.size() : 0;
    output.writeShort(mSize);
    for (int i = 0; i < mSize; i++) {
        output.writeLong(measureChunkOffsets.get(i));
    }
    for (int i = 0; i < mSize; i++) {
        output.writeInt(measureChunksLength.get(i));
    }
    writeChunkInfoForOlderVersions(output);

    boolean isSortedPresent = (isSorted != null);
    output.writeBoolean(isSortedPresent);
    if (isSortedPresent) {
        output.writeBoolean(isSorted);
    }
    if (null != getNumberOfRowsPerPage()) {
        output.writeShort(getNumberOfRowsPerPage().length);
        for (int i = 0; i < getNumberOfRowsPerPage().length; i++) {
            output.writeInt(getNumberOfRowsPerPage()[i]);
        }
    }
}
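
Note that the chunk counts and the rows-per-page length are written with writeShort, so this layout implicitly assumes those counts fit in 16 bits; also, unlike the isSorted field, the rows-per-page block carries no presence flag of its own and is simply omitted when the array is null.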