Example usage for java.io DataOutputStream writeInt

Introduction

This page collects example usages of java.io.DataOutputStream.writeInt from real-world source code.

Prototype

public final void writeInt(int v) throws IOException 

Document

Writes an int to the underlying output stream as four bytes, high byte first.
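
For example, writeInt(0x12345678) emits the bytes 12 34 56 78. The following minimal, self-contained sketch (not taken from the examples below) demonstrates the big-endian byte order:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteIntDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos);

        dos.writeInt(0x12345678);
        dos.flush();

        // Prints "12 34 56 78": the high byte is written first.
        for (byte b : baos.toByteArray()) {
            System.out.printf("%02x ", b & 0xff);
        }
    }
}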

Usage

From source file:me.cybermaxke.merchants.v16r3.SMerchant.java

protected void sendUpdate() {
    if (this.customers.isEmpty()) {
        return;
    }

    ByteArrayOutputStream baos0 = new ByteArrayOutputStream();
    DataOutputStream dos0 = new DataOutputStream(baos0);

    // Write the recipe list
    this.offers.a(dos0);

    try {
        dos0.flush();
        dos0.close();
    } catch (IOException e) {
        e.printStackTrace();
    }

    // Get the bytes
    byte[] data = baos0.toByteArray();

    // Send a packet to all the players
    Iterator<Player> it = this.customers.iterator();
    while (it.hasNext()) {
        EntityPlayer player0 = ((CraftPlayer) it.next()).getHandle();

        // Every player has a different window id
        ByteArrayOutputStream baos1 = new ByteArrayOutputStream();
        DataOutputStream dos1 = new DataOutputStream(baos1);

        try {
            dos1.writeInt(player0.activeContainer.windowId);
            dos1.write(data);
            dos1.flush();
            dos1.close();
        } catch (IOException e) {
            e.printStackTrace();
        }

        player0.playerConnection.sendPacket(new Packet250CustomPayload("MC|TrList", baos1.toByteArray()));
    }
}

From source file:com.clearspring.analytics.stream.cardinality.TestHyperLogLogPlus.java

@Test
public void testLegacyCodec_normal() throws IOException {
    int bits = 18;
    int cardinality = 1000000;

    HyperLogLogPlus baseline = new HyperLogLogPlus(bits, 25);
    for (int j = 0; j < cardinality; j++) {
        double val = Math.random();
        baseline.offer(val);
    }

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);

    dos.writeInt(bits);
    dos.writeInt(25);
    dos.writeInt(0);
    dos.writeInt(baseline.getRegisterSet().size * 4);
    for (int x : baseline.getRegisterSet().readOnlyBits()) {
        dos.writeInt(x);
    }

    byte[] legacyBytes = baos.toByteArray();

    // decode legacy
    HyperLogLogPlus decoded = HyperLogLogPlus.Builder.build(legacyBytes);
    assertEquals(baseline.cardinality(), decoded.cardinality());
    byte[] newBytes = baseline.getBytes();
    assertTrue(newBytes.length < legacyBytes.length);

}

From source file:com.tc.simple.apn.factories.PushByteFactory.java

@Override
public byte[] buildPushBytes(int id, Payload payload) {
    byte[] byteMe = null;
    ByteArrayOutputStream baos = null;
    DataOutputStream dos = null;

    try {
        baos = new ByteArrayOutputStream();

        dos = new DataOutputStream(baos);

        int expiry = 0; // (int) ((System.currentTimeMillis () / 1000L) + 7200);

        char[] cars = payload.getToken().trim().toCharArray();

        byte[] tokenBytes = Hex.decodeHex(cars);

        //command
        dos.writeByte(1);

        //id
        dos.writeInt(id);

        //expiry
        dos.writeInt(expiry);

        //token length.
        dos.writeShort(tokenBytes.length);

        //token
        dos.write(tokenBytes);

        //payload length must be the byte length, not the char count
        byte[] jsonBytes = payload.getJson().getBytes("UTF-8");
        dos.writeShort(jsonBytes.length);

        logger.log(Level.FINE, payload.getJson());

        //payload
        dos.write(jsonBytes);

        byteMe = baos.toByteArray();

    } catch (Exception e) {
        logger.log(Level.SEVERE, null, e);

    } finally {
        CloseUtils.close(dos);
        CloseUtils.close(baos);
    }

    return byteMe;

}

From source file:org.apache.nifi.distributed.cache.client.DistributedMapCacheClientService.java

private <T> void serialize(final T value, final Serializer<T> serializer, final DataOutputStream dos)
        throws IOException {
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    serializer.serialize(value, baos);
    dos.writeInt(baos.size());
    baos.writeTo(dos);
}
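
Because serialize writes a length prefix before the value bytes, the receiving side can read the frame back symmetrically: first readInt for the length, then readFully for the body. A minimal sketch of that counterpart, assuming a matching Deserializer<T> with a deserialize(byte[]) method (an assumption for illustration, not code from the NiFi source):

private <T> T deserialize(final Deserializer<T> deserializer, final DataInputStream dis) throws IOException {
    final int length = dis.readInt();        // length prefix written by serialize()
    final byte[] buffer = new byte[length];
    dis.readFully(buffer);                   // read exactly 'length' bytes
    return deserializer.deserialize(buffer); // assumed counterpart API
}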

From source file:org.wso2.caching.digest.DOMHASHGenerator.java

/**
 * This is an overloaded method for the digest generation for OMElement
 *
 * @param element           - OMElement to be subjected to the key generation
 * @param digestAlgorithm   - digest algorithm as a String
 * @return byte[] representing the calculated digest over the provided element
 * @throws CachingException if there is an io error or the specified algorithm is incorrect
 */
public byte[] getDigest(OMElement element, String digestAlgorithm) throws CachingException {

    byte[] digest = new byte[0];

    try {

        MessageDigest md = MessageDigest.getInstance(digestAlgorithm);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos);
        dos.writeInt(1);
        dos.write(getExpandedName(element).getBytes("UnicodeBigUnmarked"));
        dos.write((byte) 0);
        dos.write((byte) 0);

        Collection attrs = getAttributesWithoutNS(element);
        dos.writeInt(attrs.size());

        Iterator itr = attrs.iterator();
        while (itr.hasNext())
            dos.write(getDigest((OMAttribute) itr.next(), digestAlgorithm));
        OMNode node = element.getFirstOMChild();

        // adjoining Texts are merged,
        // there is no 0-length Text, and
        // comment nodes are removed.
        int length = 0;
        itr = element.getChildElements();
        while (itr.hasNext()) {
            length++;
            itr.next();
        }
        dos.writeInt(length);

        while (node != null) {
            dos.write(getDigest(node, digestAlgorithm));
            node = node.getNextOMSibling();
        }
        dos.close();
        md.update(baos.toByteArray());

        digest = md.digest();

    } catch (NoSuchAlgorithmException e) {
        handleException(
                "Can not locate the algorithm " + "provided for the digest generation : " + digestAlgorithm, e);
    } catch (IOException e) {
        handleException("Error in calculating the " + "digest value for the OMElement : " + element, e);
    }

    return digest;
}

From source file:com.clearspring.analytics.stream.cardinality.TestHyperLogLogPlus.java

@Test
public void testLegacyCodec_sparse() throws IOException {
    int bits = 18;
    int cardinality = 5000;

    HyperLogLogPlus baseline = new HyperLogLogPlus(bits, 25);
    for (int j = 0; j < cardinality; j++) {
        double val = Math.random();
        baseline.offer(val);
    }

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);

    dos.writeInt(bits);
    dos.writeInt(25);
    dos.writeInt(1);
    baseline.mergeTempList();
    int[] sparseSet = baseline.getSparseSet();
    List<byte[]> sparseBytes = new ArrayList<byte[]>(sparseSet.length);

    int prevDelta = 0;
    for (int k : sparseSet) {
        sparseBytes.add(Varint.writeUnsignedVarInt(k - prevDelta));
        prevDelta = k;
    }
    for (byte[] bytes : sparseBytes) {
        dos.writeInt(bytes.length);
        dos.write(bytes);
    }
    dos.writeInt(-1);

    byte[] legacyBytes = baos.toByteArray();

    //  decode legacy
    HyperLogLogPlus decoded = HyperLogLogPlus.Builder.build(legacyBytes);
    assertEquals(baseline.cardinality(), decoded.cardinality());
    byte[] newBytes = baseline.getBytes();
    assertTrue(newBytes.length < legacyBytes.length);

}

From source file:org.openmrs.module.odkconnector.serialization.processor.HttpProcessor.java

/**
 * Process any stream connection to this module
 *
 * @param inputStream  the input stream
 * @param outputStream the output stream
 * @throws Exception when the stream processing failed
 */
@Override
public void process(final InputStream inputStream, final OutputStream outputStream) throws Exception {

    GZIPInputStream gzipInputStream = new GZIPInputStream(new BufferedInputStream(inputStream));

    DataInputStream dataInputStream = new DataInputStream(gzipInputStream);
    String username = dataInputStream.readUTF();
    String password = dataInputStream.readUTF();
    Boolean savedSearch = dataInputStream.readBoolean();
    Integer cohortId = 0;
    Integer programId = 0;
    if (StringUtils.equalsIgnoreCase(getAction(), HttpProcessor.PROCESS_PATIENTS)) {
        cohortId = dataInputStream.readInt();
        programId = dataInputStream.readInt();
    }
    dataInputStream.close();

    GZIPOutputStream gzipOutputStream = new GZIPOutputStream(new BufferedOutputStream(outputStream));
    DataOutputStream dataOutputStream = new DataOutputStream(gzipOutputStream);
    try {
        Context.openSession();
        Context.authenticate(username, password);

        dataOutputStream.writeInt(HttpURLConnection.HTTP_OK);
        dataOutputStream.flush();

        if (log.isDebugEnabled()) {
            log.debug("Saved Search Value: " + savedSearch);
            log.debug("Cohort ID: " + cohortId);
            log.debug("Program ID: " + programId);
        }

        Serializer serializer = HandlerUtil.getPreferredHandler(Serializer.class, List.class);
        if (StringUtils.equalsIgnoreCase(getAction(), HttpProcessor.PROCESS_PATIENTS)) {
            ConnectorService connectorService = Context.getService(ConnectorService.class);

            Cohort cohort = new Cohort();
            if (savedSearch) {
                CohortSearchHistory history = new CohortSearchHistory();
                PatientSearchReportObject patientSearchReportObject = (PatientSearchReportObject) Context
                        .getReportObjectService().getReportObject(cohortId);
                if (patientSearchReportObject != null) {
                    if (log.isDebugEnabled()) {
                        log.debug("Object Class: " + patientSearchReportObject.getClass());
                        log.debug("Object Name: " + patientSearchReportObject.getName());
                        log.debug("Object Subtype: " + patientSearchReportObject.getSubType());
                        log.debug("Object Type: " + patientSearchReportObject.getType());
                    }
                    history.addSearchItem(PatientSearch.createSavedSearchReference(cohortId));
                    cohort = history.getPatientSet(0, null);
                }
            } else {
                cohort = Context.getCohortService().getCohort(cohortId);
            }

            if (log.isDebugEnabled())
                log.debug("Cohort data: " + cohort.getMemberIds());

            log.info("Streaming patients information!");
            serializer.write(dataOutputStream, connectorService.getCohortPatients(cohort));

            // check the concept list
            Collection<Concept> concepts = null;
            ConceptConfiguration conceptConfiguration = connectorService.getConceptConfiguration(programId);
            if (conceptConfiguration != null) {

                if (log.isDebugEnabled())
                    log.debug("Printing concept configuration information: " + conceptConfiguration);

                concepts = ConnectorUtils.getConcepts(conceptConfiguration.getConfiguredConcepts());
            }
            log.info("Streaming observations information!");
            serializer.write(dataOutputStream, connectorService.getCohortObservations(cohort, concepts));

            // evaluate and get the applicable form for the patients
            CohortDefinitionService cohortDefinitionService = Context.getService(CohortDefinitionService.class);
            ReportingConnectorService reportingService = Context.getService(ReportingConnectorService.class);
            List<ExtendedDefinition> definitions = reportingService.getAllExtendedDefinition();

            EvaluationContext context = new EvaluationContext();
            context.setBaseCohort(cohort);

            Collection intersectedMemberIds = Collections.emptyList();
            List<SerializedForm> serializedForms = new ArrayList<SerializedForm>();
            for (ExtendedDefinition definition : definitions) {
                if (definition.containsProperty(ExtendedDefinition.DEFINITION_PROPERTY_FORM)) {

                    if (log.isDebugEnabled())
                        log.debug("Evaluating: " + definition.getCohortDefinition().getName());

                    EvaluatedCohort evaluatedCohort = cohortDefinitionService
                            .evaluate(definition.getCohortDefinition(), context);
                    // the cohort could be null, so we don't want to get exception during the intersection process
                    if (cohort != null)
                        intersectedMemberIds = CollectionUtils.intersection(cohort.getMemberIds(),
                                evaluatedCohort.getMemberIds());

                    if (log.isDebugEnabled())
                        log.debug("Cohort data after intersection: " + intersectedMemberIds);

                    for (DefinitionProperty definitionProperty : definition.getProperties()) {
                        // skip retired definition property
                        if (definitionProperty.isRetired())
                            continue;

                        Integer formId = NumberUtils.toInt(definitionProperty.getPropertyValue());
                        for (Object patientId : intersectedMemberIds)
                            serializedForms.add(
                                    new SerializedForm(NumberUtils.toInt(String.valueOf(patientId)), formId));
                    }
                }
            }

            if (log.isDebugEnabled())
                log.debug("Serialized form informations:" + serializedForms);

            log.info("Streaming forms information!");
            serializer.write(dataOutputStream, serializedForms);

        } else {
            if (savedSearch) {
                List<SerializedCohort> serializedCohorts = new ArrayList<SerializedCohort>();
                List<AbstractReportObject> objects = Context.getReportObjectService()
                        .getReportObjectsByType(OpenmrsConstants.REPORT_OBJECT_TYPE_PATIENTSEARCH);
                for (AbstractReportObject object : objects) {
                    SerializedCohort serializedCohort = new SerializedCohort();
                    serializedCohort.setId(object.getReportObjectId());
                    serializedCohort.setName(object.getName());
                    serializedCohorts.add(serializedCohort);
                }
                serializer.write(dataOutputStream, serializedCohorts);

            } else {
                serializer.write(dataOutputStream, Context.getCohortService().getAllCohorts());
            }
        }

        dataOutputStream.close();
    } catch (Exception e) {
        log.error("Processing stream failed!", e);
        dataOutputStream.writeInt(HttpURLConnection.HTTP_UNAUTHORIZED);
        dataOutputStream.close();
    } finally {
        Context.closeSession();
    }
}

From source file:org.apache.fontbox.ttf.TTFSubFont.java

private static long writeTableHeader(DataOutputStream dos, String tag, long offset, byte[] bytes)
        throws IOException {

    int n = bytes.length;
    int nup;
    long checksum = 0L;

    for (nup = 0; nup < n; ++nup) {
        checksum += (((long) bytes[nup]) & 0xffL) << (24 - (nup % 4) * 8);
    }

    checksum &= 0xffffffffL;

    LOG.debug(String.format("Writing table header [%s,%08x,%08x,%08x]", tag, checksum, offset, bytes.length));

    byte[] tagbytes = tag.getBytes("US-ASCII");

    dos.write(tagbytes, 0, 4);
    dos.writeInt((int) checksum);
    dos.writeInt((int) offset);
    dos.writeInt(bytes.length);

    // account for the checksum twice, once for the header field, once for the content itself.
    return buildUint32(tagbytes) + checksum + checksum + offset + bytes.length;
}
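
buildUint32 is a helper defined elsewhere in TTFSubFont. A plausible sketch of what it computes, packing the four tag bytes high byte first into an unsigned 32-bit value so the tag itself is counted in the returned checksum contribution (an illustrative assumption, not the actual PDFBox source):

private static long buildUint32(byte[] bytes) {
    // Same big-endian layout that writeInt uses: high byte first.
    return ((bytes[0] & 0xffL) << 24)
            | ((bytes[1] & 0xffL) << 16)
            | ((bytes[2] & 0xffL) << 8)
            | (bytes[3] & 0xffL);
}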

From source file:IntSort.java

public void writeStream(String[] sData, boolean[] bData, int[] iData) {
    try {
        // Write data into an internal byte array
        ByteArrayOutputStream strmBytes = new ByteArrayOutputStream();

        // Write Java data types into the above byte array
        DataOutputStream strmDataType = new DataOutputStream(strmBytes);

        byte[] record;

        for (int i = 0; i < sData.length; i++) {
            // Write Java data types      
            strmDataType.writeUTF(sData[i]);
            strmDataType.writeBoolean(bData[i]);
            strmDataType.writeInt(iData[i]);

            // Clear any buffered data
            strmDataType.flush();

            // Get stream data into byte array and write record      
            record = strmBytes.toByteArray();
            rs.addRecord(record, 0, record.length);

            // Discard buffered bytes so the next record's writes
            // start at the beginning of the internal array
            strmBytes.reset();
        }

        strmBytes.close();
        strmDataType.close();

    } catch (Exception e) {
        db(e.toString());
    }
}

From source file:com.sky.drovik.player.media.DiskCache.java

private void writeIndex() {
    File tempFile = null;
    final String tempFilePath = mCacheDirectoryPath;
    final String indexFilePath = getIndexFilePath();
    try {
        tempFile = File.createTempFile("DiskCache", null, new File(tempFilePath));
    } catch (Exception e) {
        Log.e(TAG, "Unable to create or tempFile " + tempFilePath);
        return;
    }
    try {
        final FileOutputStream fileOutput = new FileOutputStream(tempFile);
        final BufferedOutputStream bufferedOutput = new BufferedOutputStream(fileOutput, 1024);
        final DataOutputStream dataOutput = new DataOutputStream(bufferedOutput);

        // Write the index header.
        final int numRecords = mIndexMap.size();
        dataOutput.writeInt(INDEX_HEADER_MAGIC);
        dataOutput.writeInt(INDEX_HEADER_VERSION);
        dataOutput.writeShort(mTailChunk);
        dataOutput.writeInt(numRecords);

        // Write the records.
        for (int i = 0; i < numRecords; ++i) {
            final long key = mIndexMap.keyAt(i);
            final Record record = mIndexMap.valueAt(i);
            dataOutput.writeLong(key);
            dataOutput.writeShort(record.chunk);
            dataOutput.writeInt(record.offset);
            dataOutput.writeInt(record.size);
            dataOutput.writeInt(record.sizeOnDisk);
            dataOutput.writeLong(record.timestamp);
        }

        // Close the file.
        dataOutput.close();

        // Log.d(TAG, "Wrote index with " + numRecords + " records.");

        // Atomically overwrite the old index file.
        tempFile.renameTo(new File(indexFilePath));
    } catch (Exception e) {
        // Was unable to perform the operation, we delete the temp file
        Log.e(TAG, "Unable to write the index file " + indexFilePath);
        tempFile.delete();
    }
}
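
Since every field above is written with a fixed-width DataOutputStream call, the index can be read back with the mirror-image DataInputStream calls. A minimal, hypothetical read-back sketch (the Record constructor and error handling are assumptions for illustration, not the module's actual code):

private void readIndex(DataInputStream dataInput) throws IOException {
    // Read the index header; bail out if the magic or version is wrong.
    if (dataInput.readInt() != INDEX_HEADER_MAGIC
            || dataInput.readInt() != INDEX_HEADER_VERSION) {
        throw new IOException("Unrecognized index header");
    }
    mTailChunk = dataInput.readShort();
    final int numRecords = dataInput.readInt();

    // Read the records in the same order they were written.
    for (int i = 0; i < numRecords; ++i) {
        final long key = dataInput.readLong();
        final short chunk = dataInput.readShort();
        final int offset = dataInput.readInt();
        final int size = dataInput.readInt();
        final int sizeOnDisk = dataInput.readInt();
        final long timestamp = dataInput.readLong();
        // Hypothetical constructor mirroring the fields written above.
        mIndexMap.put(key, new Record(chunk, offset, size, sizeOnDisk, timestamp));
    }
}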