Example usage for java.io DataOutputStream writeInt

List of usage examples for java.io DataOutputStream writeInt

Introduction

On this page you can find example usages of java.io.DataOutputStream.writeInt.

Prototype

public final void writeInt(int v) throws IOException 

Document

Writes an int to the underlying output stream as four bytes, high byte first.
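
Before the full examples, here is a minimal self-contained sketch (not taken from the sources below) showing the four-byte, high-byte-first encoding that writeInt produces:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteIntDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(bos);
        dos.writeInt(0x12345678); // written high byte first (big-endian)
        dos.flush();
        for (byte b : bos.toByteArray()) {
            System.out.printf("%02x ", b); // prints: 12 34 56 78
        }
    }
}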

Usage

From source file:org.openxdata.server.serializer.JavaRosaXformSerializer.java

@SuppressWarnings("unchecked")
public void serializeUsers(OutputStream os, Object data) {
    List<Object[]> users = (List<Object[]>) data;
    DataOutputStream dos = new DataOutputStream(os);
    try {
        dos.writeByte(users.size());
        for (Object[] user : users) {
            dos.writeInt((Integer) user[0]);
            dos.writeUTF((String) user[1]);
            dos.writeUTF((String) user[2]);
            dos.writeUTF((String) user[3]);
        }
    } catch (IOException e) {
        throw new UnexpectedException(e);
    }
}
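
A hypothetical reader for the record layout written above would mirror these calls with a DataInputStream; this sketch is not part of the OpenXData code base, and the variable names are placeholders:

public void deserializeUsers(DataInputStream dis) throws IOException {
    int count = dis.readUnsignedByte(); // mirrors dos.writeByte(users.size()), so at most 255 users
    for (int i = 0; i < count; i++) {
        int userId = dis.readInt();     // user[0], written with writeInt
        String column1 = dis.readUTF(); // user[1]
        String column2 = dis.readUTF(); // user[2]
        String column3 = dis.readUTF(); // user[3]
        // ... use the fields as needed
    }
}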

From source file:org.apache.hadoop.hbase.io.hfile.TestHFileEncryption.java

private int writeBlock(FSDataOutputStream os, HFileContext fileContext, int size) throws IOException {
    HFileBlock.Writer hbw = new HFileBlock.Writer(null, fileContext);
    DataOutputStream dos = hbw.startWriting(BlockType.DATA);
    for (int j = 0; j < size; j++) {
        dos.writeInt(j);
    }
    hbw.writeHeaderAndData(os);
    LOG.info("Wrote a block at " + os.getPos() + " with" + " onDiskSizeWithHeader="
            + hbw.getOnDiskSizeWithHeader() + " onDiskSizeWithoutHeader="
            + hbw.getOnDiskSizeWithoutHeader() + " uncompressedSizeWithoutHeader="
            + hbw.getUncompressedSizeWithoutHeader());
    return hbw.getOnDiskSizeWithHeader();
}

From source file:J2MEWriteReadMixedDataTypesExample.java

public void commandAction(Command command, Displayable displayable) {
    if (command == exit) {
        destroyApp(true);
        notifyDestroyed();
    } else if (command == start) {
        try {
            recordstore = RecordStore.openRecordStore("myRecordStore", true);
            byte[] outputRecord;
            String outputString = "First Record";
            int outputInteger = 15;
            boolean outputBoolean = true;
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            DataOutputStream outputDataStream = new DataOutputStream(outputStream);
            outputDataStream.writeUTF(outputString);
            outputDataStream.writeBoolean(outputBoolean);
            outputDataStream.writeInt(outputInteger);
            outputDataStream.flush();
            outputRecord = outputStream.toByteArray();
            recordstore.addRecord(outputRecord, 0, outputRecord.length);
            outputStream.reset();
            outputStream.close();
            outputDataStream.close();
            String inputString = null;
            int inputInteger = 0;
            boolean inputBoolean = false;
            byte[] byteInputData = new byte[100];
            ByteArrayInputStream inputStream = new ByteArrayInputStream(byteInputData);
            DataInputStream inputDataStream = new DataInputStream(inputStream);
            for (int x = 1; x <= recordstore.getNumRecords(); x++) {
                recordstore.getRecord(x, byteInputData, 0);
                inputString = inputDataStream.readUTF();
                inputBoolean = inputDataStream.readBoolean();
                inputInteger = inputDataStream.readInt();
                inputStream.reset();
            }
            inputStream.close();
            inputDataStream.close();
            alert = new Alert("Reading", inputString + " " + inputInteger + " " + inputBoolean, null,
                    AlertType.WARNING);
            alert.setTimeout(Alert.FOREVER);
            display.setCurrent(alert);
            recordstore.closeRecordStore();
            if (RecordStore.listRecordStores() != null) {
                RecordStore.deleteRecordStore("myRecordStore");
            }
        } catch (Exception error) {
            alert = new Alert("Error Removing", error.toString(), null, AlertType.WARNING);
            alert.setTimeout(Alert.FOREVER);
            display.setCurrent(alert);
        }
    }
}

From source file:org.apache.cassandra.db.ReadCommand.java

public void serialize(ReadCommand rm, DataOutputStream dos) throws IOException {
    dos.writeUTF(rm.table);
    dos.writeUTF(rm.key);
    dos.writeUTF(rm.columnFamilyColumn);
    dos.writeInt(rm.start);
    dos.writeInt(rm.count);
    dos.writeLong(rm.sinceTimestamp);
    dos.writeBoolean(rm.isDigestQuery());
    dos.writeInt(rm.columnNames.size());
    if (rm.columnNames.size() > 0) {
        for (String cName : rm.columnNames) {
            dos.writeInt(cName.getBytes().length);
            dos.write(cName.getBytes());
        }
    }
}
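
When following the writeInt calls above, a sketch of the mirrored read order may help; this is not the actual Cassandra deserializer, and the local variable names are only placeholders:

public void deserialize(DataInputStream dis) throws IOException {
    String table = dis.readUTF();
    String key = dis.readUTF();
    String columnFamilyColumn = dis.readUTF();
    int start = dis.readInt();
    int count = dis.readInt();
    long sinceTimestamp = dis.readLong();
    boolean isDigestQuery = dis.readBoolean();
    int nameCount = dis.readInt();
    List<String> columnNames = new ArrayList<String>();
    for (int i = 0; i < nameCount; i++) {
        byte[] nameBytes = new byte[dis.readInt()]; // length prefix written with writeInt
        dis.readFully(nameBytes);
        columnNames.add(new String(nameBytes));     // same default charset as cName.getBytes()
    }
}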

From source file:org.eclipse.gyrex.cloud.internal.queue.Message.java

public byte[] toByteArray() throws IOException {
    final ByteArrayOutputStream bos = new ByteArrayOutputStream();
    final DataOutputStream dos = new DataOutputStream(bos);
    try {
        dos.writeInt(1); // serialized format version
        dos.writeLong(invisibleTimeoutTS); // invisible timeout
        dos.writeInt(body.length); // body size
        dos.write(body); // body
        return bos.toByteArray();
    } finally {
        IOUtils.closeQuietly(dos);
    }
}
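
A hypothetical counterpart that parses the byte array produced above could look like the following; it is not part of the Gyrex source and only mirrors the write order:

public static void readMessage(byte[] bytes) throws IOException {
    DataInputStream dis = new DataInputStream(new ByteArrayInputStream(bytes));
    int version = dis.readInt();           // serialized format version (1)
    long invisibleTimeoutTS = dis.readLong();
    byte[] body = new byte[dis.readInt()]; // body size written with writeInt
    dis.readFully(body);
}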

From source file:org.openmrs.module.odkconnector.serialization.serializer.openmrs.PatientSerializer.java

/**
 * Write the patient information to the output stream.
 *
 * @param stream the output stream
 * @param data   the data that need to be written to the output stream
 */
@Override
public void write(final OutputStream stream, final Object data) throws IOException {
    try {
        Patient patient = (Patient) data;

        DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ");

        DataOutputStream outputStream = new DataOutputStream(stream);
        // skip if the patient is an invalid patient
        if (patient == null || patient.getPersonName() == null || patient.getPatientIdentifier() == null)
            return;

        outputStream.writeInt(patient.getPatientId());

        PersonName personName = patient.getPersonName();
        outputStream.writeUTF(StringUtils.defaultString(personName.getFamilyName()));
        outputStream.writeUTF(StringUtils.defaultString(personName.getMiddleName()));
        outputStream.writeUTF(StringUtils.defaultString(personName.getGivenName()));

        outputStream.writeUTF(StringUtils.defaultString(patient.getGender()));

        Date birthDate = patient.getBirthdate();
        outputStream.writeUTF(birthDate != null ? dateFormat.format(birthDate.getTime()) : StringUtils.EMPTY);

        PatientIdentifier patientIdentifier = patient.getPatientIdentifier();
        outputStream.writeUTF(StringUtils.defaultString(patientIdentifier.getIdentifier()));
    } catch (IOException e) {
        log.info("Writing patient information failed!", e);
    }
}
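
A hypothetical client-side reader that mirrors the write order above might look like this; it is not part of the ODK Connector module, and the variable names are placeholders:

public void read(final InputStream stream) throws IOException {
    DataInputStream inputStream = new DataInputStream(stream);
    int patientId = inputStream.readInt();    // mirrors writeInt(patient.getPatientId())
    String familyName = inputStream.readUTF();
    String middleName = inputStream.readUTF();
    String givenName = inputStream.readUTF();
    String gender = inputStream.readUTF();
    String birthDate = inputStream.readUTF(); // empty string when no birth date was written
    String identifier = inputStream.readUTF();
}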

From source file:org.getspout.spout.packet.PacketCacheFile.java

public void writeData(DataOutputStream output) throws IOException {
    PacketUtil.writeString(output, fileName);
    PacketUtil.writeString(output, plugin);
    output.writeBoolean(compressed);
    output.writeInt(fileData.length);
    output.write(fileData);
}
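
A sketch of the mirrored readData could look as follows, assuming the same fields as writeData and a PacketUtil.readString counterpart (an assumption, not confirmed from the Spout source):

public void readData(DataInputStream input) throws IOException {
    fileName = PacketUtil.readString(input); // assumed counterpart of PacketUtil.writeString
    plugin = PacketUtil.readString(input);
    compressed = input.readBoolean();
    fileData = new byte[input.readInt()];    // length prefix written with writeInt
    input.readFully(fileData);
}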

From source file:com.exzogeni.dk.http.cache.DiscCacheStore.java

private void saveMetaFile(@NonNull File metaFile, @NonNull Map<String, List<String>> metaHeaders, long maxAge)
        throws IOException {
    final AtomicFile af = new AtomicFile(metaFile);
    final FileOutputStream fos = af.startWrite();
    try {
        final DataOutputStream dat = new DataOutputStream(new BufferPoolOutputStream(fos));
        dat.writeLong(System.currentTimeMillis() + maxAge);
        dat.writeInt(metaHeaders.size());
        for (final Map.Entry<String, List<String>> header : metaHeaders.entrySet()) {
            dat.writeUTF(header.getKey());
            dat.writeInt(header.getValue().size());
            for (final String value : header.getValue()) {
                dat.writeUTF(value);
            }
        }
        IOUtils.closeQuietly(dat);
        af.finishWrite(fos);
    } catch (IOException e) {
        af.failWrite(fos);
        af.delete();
        throw e;
    }
}
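
For reference, a hypothetical reader for the meta-file layout written above; it reads the plain file, skips the AtomicFile and buffer-pool plumbing, and is not taken from the library:

private Map<String, List<String>> loadMetaFile(File metaFile) throws IOException {
    final DataInputStream dat = new DataInputStream(new FileInputStream(metaFile));
    try {
        long expireTime = dat.readLong(); // System.currentTimeMillis() + maxAge
        int headerCount = dat.readInt();  // written with writeInt
        final Map<String, List<String>> headers = new HashMap<String, List<String>>();
        for (int i = 0; i < headerCount; i++) {
            String key = dat.readUTF();
            int valueCount = dat.readInt();
            List<String> values = new ArrayList<String>(valueCount);
            for (int j = 0; j < valueCount; j++) {
                values.add(dat.readUTF());
            }
            headers.put(key, values);
        }
        return headers;
    } finally {
        dat.close();
    }
}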

From source file:net.sf.keystore_explorer.crypto.x509.X509ExtensionSet.java

private void saveExtensions(Map<String, byte[]> extensions, DataOutputStream dos) throws IOException {
    dos.writeInt(extensions.size());

    for (String oid : extensions.keySet()) {
        dos.writeInt(oid.length());
        dos.writeChars(oid);

        byte[] value = extensions.get(oid);
        dos.writeInt(value.length);
        dos.write(value);
    }
}
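
A sketch of the mirrored load, reading the character count written by writeInt and the two-byte chars written by writeChars; it is not necessarily identical to the KeyStore Explorer loader:

private Map<String, byte[]> loadExtensions(DataInputStream dis) throws IOException {
    int extensionCount = dis.readInt();
    Map<String, byte[]> extensions = new LinkedHashMap<String, byte[]>();
    for (int i = 0; i < extensionCount; i++) {
        int oidLength = dis.readInt();          // character count written with writeInt
        StringBuilder oid = new StringBuilder(oidLength);
        for (int j = 0; j < oidLength; j++) {
            oid.append(dis.readChar());         // writeChars emits two bytes per char
        }
        byte[] value = new byte[dis.readInt()]; // value length prefix
        dis.readFully(value);
        extensions.put(oid.toString(), value);
    }
    return extensions;
}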

From source file:edu.umn.cs.spatialHadoop.nasa.StockQuadTree.java

/**
 * Constructs an aggregate quad tree out of a two-dimensional array of values.
 * @param metadata the NASA dataset metadata; its timestamp is written into the tree header
 * @param values the values of the two-dimensional grid, stored in a flat array
 * @param fillValue the value marking empty cells, which is excluded from the aggregates
 * @param out the output stream to write the constructed quad tree to
 * @throws IOException
 */
public static void build(NASADataset metadata, short[] values, short fillValue, DataOutputStream out)
        throws IOException {
    int length = Array.getLength(values);
    int resolution = (int) Math.round(Math.sqrt(length));

    // Write tree header
    out.writeInt(resolution); // resolution
    out.writeShort(fillValue);
    out.writeInt(1); // cardinality
    out.writeLong(metadata.time); // Timestamp

    // Fetch the stock quad tree of the associated resolution
    StockQuadTree stockQuadTree = getOrCreateStockQuadTree(resolution);
    // Sort values by their respective Z-Order values in linear time
    short[] sortedValues = new short[length];
    for (int i = 0; i < length; i++)
        sortedValues[i] = values[stockQuadTree.r[i]];

    // Write all sorted values
    for (short v : sortedValues)
        out.writeShort(v);

    // Compute aggregate values for all nodes in the tree
    // Go in reverse ID order to ensure children are computed before parents
    Node[] nodes = new Node[stockQuadTree.nodesID.length];
    for (int iNode = stockQuadTree.nodesID.length - 1; iNode >= 0; iNode--) {
        // Initialize all aggregate values
        nodes[iNode] = new Node();

        int firstChildId = stockQuadTree.nodesID[iNode] * 4;
        int firstChildPos = Arrays.binarySearch(stockQuadTree.nodesID, firstChildId);
        boolean isLeaf = firstChildPos < 0;

        if (isLeaf) {
            for (int iVal = stockQuadTree.nodesStartPosition[iNode]; iVal < stockQuadTree.nodesEndPosition[iNode]; iVal++) {
                short value;
                Object val = Array.get(sortedValues, iVal);
                if (val instanceof Short) {
                    value = (Short) val;
                } else {
                    throw new RuntimeException("Cannot handle values of type " + val.getClass());
                }
                if (value != fillValue)
                    nodes[iNode].accumulate(value);
            }
        } else {
            // Compute from the four children
            for (int iChild = 0; iChild < 4; iChild++) {
                int childPos = firstChildPos + iChild;
                nodes[iNode].accumulate(nodes[childPos]);
            }
        }
    }

    // Write nodes to file in sorted order
    for (int iNode = 0; iNode < nodes.length; iNode++)
        nodes[iNode].write(out);
}
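
A hypothetical reader for the tree header written above; the real class has its own loading code, so this only illustrates how the writeInt, writeShort, and writeLong calls pair with reads:

public static void readHeader(DataInputStream in) throws IOException {
    int resolution = in.readInt();    // resolution
    short fillValue = in.readShort(); // fill value
    int cardinality = in.readInt();   // cardinality (always 1 here)
    long timestamp = in.readLong();   // metadata.time
    // The resolution * resolution sorted short values and the node records follow.
}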