Example usage for java.io ByteArrayOutputStream reset

List of usage examples for java.io ByteArrayOutputStream reset

Introduction

On this page you can find example usages of java.io ByteArrayOutputStream reset.

Prototype

public synchronized void reset() 

Document

Resets the count field of this ByteArrayOutputStream to zero, so that all currently accumulated output in the output stream is discarded.
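
Before the project examples, here is a minimal sketch of the pattern most of them follow: allocate one ByteArrayOutputStream, write into it, take a snapshot with toByteArray(), and call reset() so the already-allocated internal buffer is reused for the next record instead of constructing a new stream each time. The class name and record values below are illustrative only.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class ResetSketch {
    public static void main(String[] args) throws IOException {
        // One buffer, reused across records instead of allocating a new stream per record.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream(64);
        String[] records = { "alpha", "beta", "gamma" };
        for (String record : records) {
            buffer.reset();                        // discard previously accumulated bytes
            buffer.write(record.getBytes(StandardCharsets.UTF_8));
            byte[] encoded = buffer.toByteArray(); // snapshot of just this record
            System.out.println(record + " -> " + encoded.length + " bytes");
        }
    }
}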

Usage

From source file:com.ebay.jetstream.messaging.transport.zookeeper.ZooKeeperTransport.java

void setData(JetstreamMessage msg) {

    if (!m_initialized.get()) {
        //throw new Exception("ZooKeeperTransport not initialized");
    }

    ZooKeeperDataWrapper wrapperData = new ZooKeeperDataWrapper(msg);
    ByteArrayOutputStream out_stream = new ByteArrayOutputStream(64000);
    out_stream.reset();
    ObjectOutputStream out;
    try {
        out = new ObjectOutputStream(out_stream);
        out.writeObject(wrapperData);
        out.flush();
    } catch (IOException e) {
        LOGGER.warn(e.getLocalizedMessage());
    }

    byte buf[] = out_stream.toByteArray();
    String path = prependPath(msg.getTopic().getTopicName());

    String ctxtpath = getNettyContext(msg);
    if (ctxtpath != null) {
        path = path + prependPath(ctxtpath);
    }

    LOGGER.info("Topic to Send ::" + path);
    try {
        m_group.setGroupMemberData(path, buf);
    } catch (Throwable t) {
        LOGGER.warn("Exception while sending message to ZK server", t);
    }

    m_totalMsgsSent.increment();
    m_msgsSentPerSec.increment();

    buf = null;

}

From source file:org.apache.hadoop.mapreduce.TestMRJobClient.java

/**
 * print a job list
 */
protected void testAllJobList(String jobId, Configuration conf) throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // bad options

    int exitCode = runTool(conf, createJobClient(), new String[] { "-list", "alldata" }, out);
    assertEquals("Exit code", -1, exitCode);
    exitCode = runTool(conf, createJobClient(),
            // all jobs
            new String[] { "-list", "all" }, out);
    assertEquals("Exit code", 0, exitCode);
    BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(out.toByteArray())));
    String line;
    int counter = 0;
    while ((line = br.readLine()) != null) {
        LOG.info("line = " + line);
        if (line.contains(jobId)) {
            counter++;
        }
    }
    assertEquals(1, counter);
    out.reset();
}

From source file:org.dataconservancy.packaging.tool.ser.XStreamMarshallerFactoryTest.java

/**
 * Insures that the XStreamMarshaller produced by the XStreamMarshallerFactory uses the specified encoding setting;
 * that is, the platform default is <em>not</em> being used.
 *
 * @throws Exception
 */
@Test
public void testEncodingTest() throws Exception {
    // Holders
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    StreamResult streamResult = new StreamResult(sink);
    StreamSource source = new StreamSource();

    // Byte sequences for XML elements
    byte[] openString = "<string>".getBytes();
    byte[] closeString = "</string>".getBytes();

    underTest.setEncoding("UTF-8");
    XStreamMarshaller utf8Marshaller = underTest.newInstance();

    underTest.setEncoding("ISO-8859-1");
    XStreamMarshaller iso8859Marshaller = underTest.newInstance();

    // Our literal, a "LATIN SMALL LETTER N WITH TILDE"
    // The two byte sequence 0xC3 0xB1 in UTF-8
    // The single byte sequence 0xF1 in ISO-8859-1
    // Java Unicode literal: \u00F1

    String literal = "\u00F1";

    // "<string>".length = 8 characters, so check the 8th and 9th element of the
    // byte array

    // 0xF1 written in UTF-8 should be 0xC3B1
    utf8Marshaller.marshal(literal, streamResult);
    assertEquals(0xC3, 0x000000FF & sink.toByteArray()[openString.length]);
    assertEquals(0xB1, 0x000000FF & sink.toByteArray()[openString.length + 1]);

    sink.reset();

    // 0xF1 written in ISO-8859-1 should be 0xF1
    iso8859Marshaller.marshal(literal, streamResult);
    assertEquals(0xF1, 0x000000FF & sink.toByteArray()[openString.length]);

    // Bytes representing 0xF1 encoded as UTF-8
    byte[] utf8SourceBytes = new byte[openString.length + 2 + closeString.length];
    System.arraycopy(openString, 0, utf8SourceBytes, 0, openString.length);
    utf8SourceBytes[openString.length] = (byte) 0xc3;
    utf8SourceBytes[openString.length + 1] = (byte) 0xb1;
    System.arraycopy(closeString, 0, utf8SourceBytes, openString.length + 2, closeString.length);

    // Bytes representing 0xF1 encoded as ISO-8859-1
    byte[] iso88591bytes = new byte[openString.length + 2 + closeString.length];
    System.arraycopy(openString, 0, iso88591bytes, 0, openString.length);
    iso88591bytes[openString.length] = (byte) 0xF1;
    System.arraycopy(closeString, 0, iso88591bytes, openString.length + 1, closeString.length);

    // The UTF-8 configured marshaller should be able to unmarshal the utf-8 bytes
    source.setInputStream(new ByteArrayInputStream(utf8SourceBytes));
    assertEquals(literal, utf8Marshaller.unmarshal(source));

    // The ISO-8859-1 configured marshaller should be able to unmarshal the ISO-8859-1 bytes
    source.setInputStream(new ByteArrayInputStream(iso88591bytes));
    assertEquals(literal, iso8859Marshaller.unmarshal(source));

    // But if the ISO-8859-1 marshaller tries to unmarshal utf-8 bytes...
    source.setInputStream(new ByteArrayInputStream(utf8SourceBytes));
    assertNotEquals(literal, iso8859Marshaller.unmarshal(source));

    // Or if the UTF-8 marshaller tries to unmarshal iso-8859-1 bytes...
    source.setInputStream(new ByteArrayInputStream(iso88591bytes));
    assertNotEquals(literal, utf8Marshaller.unmarshal(source));
}

From source file:org.apache.hadoop.ipc.chinamobile.Server.java

/**
 * Setup response for the IPC Call.
 * 
 * @param response buffer to serialize the response into
 * @param call {@link Call} to which we are setting up the response
 * @param status {@link Status} of the IPC call
 * @param rv return value for the IPC Call, if the call was successful
 * @param errorClass error class, if the call failed
 * @param error error message, if the call failed
 * @throws IOException
 */
private void setupResponse(ByteArrayOutputStream response, Call call, Status status, Writable rv,
        String errorClass, String error) throws IOException {
    response.reset();
    DataOutputStream out = new DataOutputStream(response);
    out.writeInt(call.id); // write call id
    out.writeInt(status.state); // write status

    if (status == Status.SUCCESS) {
        rv.write(out);
    } else {
        WritableUtils.writeString(out, errorClass);
        WritableUtils.writeString(out, error);
    }
    call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}

From source file:com.juick.android.JASocketClient.java

public void readLoop() {
    long l = System.currentTimeMillis();
    String cause = "??";
    try {
        int b;
        //StringBuilder buf = new StringBuilder();
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        final InputStream stream = is;
        if (stream != null) {
            while (true) {
                try {
                    b = stream.read();
                    if (b == -1) {
                        cause = "short read";
                        break;
                    }
                } catch (SocketTimeoutException e) {
                    if (listener != null) {
                        if (!listener.onNoDataFromSocket()) {
                            cause = "no pong reply withing interval";
                            break;
                        }
                    }
                    continue;
                }
                if (shuttingDown) {
                    cause = "shut down client";
                    break;
                }
                markActivity();
                if (b == '\n') {
                    if (listener != null) {
                        listener.onWebSocketTextFrame(new String(baos.toByteArray(), "utf-8"));
                        baos.reset();
                    }
                } else {
                    baos.write(b);
                }
            }
        }
    } catch (Exception e) {
        cause = e.toString();
    } finally {
        l = System.currentTimeMillis() - l;
        log("DISCONNECT readLoop: " + l + " msec worked, cause=" + cause);
        disconnect();
    }
}

From source file:com.linkedin.pinot.integration.tests.BaseClusterIntegrationTest.java

public static void pushRandomAvroIntoKafka(File avroFile, String kafkaBroker, String kafkaTopic, int rowCount,
        Random random) {
    Properties properties = new Properties();
    properties.put("metadata.broker.list", kafkaBroker);
    properties.put("serializer.class", "kafka.serializer.DefaultEncoder");
    properties.put("request.required.acks", "1");

    ProducerConfig producerConfig = new ProducerConfig(properties);
    Producer<String, byte[]> producer = new Producer<String, byte[]>(producerConfig);
    try {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream(65536);
        DataFileStream<GenericRecord> reader = AvroUtils.getAvroReader(avroFile);
        BinaryEncoder binaryEncoder = new EncoderFactory().directBinaryEncoder(outputStream, null);
        Schema avroSchema = reader.getSchema();
        GenericDatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(avroSchema);
        int recordCount = 0;

        int rowsRemaining = rowCount;
        int messagesInThisBatch = 0;
        while (rowsRemaining > 0) {
            int rowsInThisBatch = Math.min(rowsRemaining, MAX_MESSAGES_PER_BATCH);
            List<KeyedMessage<String, byte[]>> messagesToWrite = new ArrayList<KeyedMessage<String, byte[]>>(
                    rowsInThisBatch);
            GenericRecord genericRecord = new GenericData.Record(avroSchema);

            for (int i = 0; i < rowsInThisBatch; ++i) {
                generateRandomRecord(genericRecord, avroSchema, random);
                outputStream.reset();
                datumWriter.write(genericRecord, binaryEncoder);
                binaryEncoder.flush();

                byte[] bytes = outputStream.toByteArray();
                KeyedMessage<String, byte[]> data = new KeyedMessage<String, byte[]>(kafkaTopic, bytes);

                if (BATCH_KAFKA_MESSAGES) {
                    messagesToWrite.add(data);
                    messagesInThisBatch++;
                    if (MAX_MESSAGES_PER_BATCH <= messagesInThisBatch) {
                        messagesInThisBatch = 0;
                        producer.send(messagesToWrite);
                        messagesToWrite.clear();
                        Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS);
                    }
                } else {
                    producer.send(data);
                }
                recordCount += 1;
            }

            if (BATCH_KAFKA_MESSAGES) {
                producer.send(messagesToWrite);
            }

            System.out.println("rowsRemaining = " + rowsRemaining);
            rowsRemaining -= rowsInThisBatch;
        }

        outputStream.close();
        reader.close();
        LOGGER.info("Finished writing " + recordCount + " records from " + avroFile.getName()
                + " into Kafka topic " + kafkaTopic);
        int totalRecordCount = totalAvroRecordWrittenCount.addAndGet(recordCount);
        LOGGER.info("Total records written so far " + totalRecordCount);
    } catch (Exception e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
}

From source file:eu.stratosphere.nephele.ipc.Server.java

/**
 * Setup response for the IPC Call.
 * 
 * @param response
 *        buffer to serialize the response into
 * @param call
 *        {@link Call} to which we are setting up the response
 * @param status
 *        {@link Status} of the IPC call
 * @param rv
 *        return value for the IPC Call, if the call was successful
 * @param errorClass
 *        error class, if the call failed
 * @param error
 *        error message, if the call failed
 * @throws IOException
 */
private void setupResponse(ByteArrayOutputStream response, Call call, Status status, IOReadableWritable rv,
        String errorClass, String error) throws IOException {
    response.reset();
    DataOutputStream out = new DataOutputStream(response);
    out.writeInt(call.id); // write call id
    out.writeInt(status.state); // write status

    if (status == Status.SUCCESS) {
        if (rv == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            StringRecord.writeString(out, rv.getClass().getName());
            rv.write(out);
        }

    } else {
        StringRecord.writeString(out, errorClass);
        StringRecord.writeString(out, error);
    }
    call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}

From source file:org.apache.flink.runtime.ipc.Server.java

/**
 * Setup response for the IPC Call.
 * 
 * @param response
 *        buffer to serialize the response into
 * @param call
 *        {@link Call} to which we are setting up the response
 * @param status
 *        {@link Status} of the IPC call
 * @param rv
 *        return value for the IPC Call, if the call was successful
 * @param errorClass
 *        error class, if the call failed
 * @param error
 *        error message, if the call failed
 * @throws IOException
 */
private void setupResponse(ByteArrayOutputStream response, Call call, Status status, IOReadableWritable rv,
        String errorClass, String error) throws IOException {
    response.reset();
    DataOutputStream out = new DataOutputStream(response);
    out.writeInt(call.id); // write call id
    out.writeInt(status.state); // write status

    if (status == Status.SUCCESS) {
        if (rv == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            StringRecord.writeString(out, rv.getClass().getName());
            rv.write(new OutputViewDataOutputStreamWrapper(out));
        }

    } else {
        StringRecord.writeString(out, errorClass);
        StringRecord.writeString(out, error);
    }
    call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}

From source file:net.solarnetwork.node.io.rxtx.SerialPortSupport.java

private boolean findEOFBytes(ByteArrayOutputStream sink, int appendedLength, byte[] eofBytes) {
    byte[] sinkBuf = sink.toByteArray();
    int eofIdx = Math.max(0, sinkBuf.length - appendedLength - eofBytes.length);
    boolean foundEOF = false;
    for (; eofIdx < (sinkBuf.length - eofBytes.length); eofIdx++) {
        foundEOF = true;
        for (int j = 0; j < eofBytes.length; j++) {
            if (sinkBuf[eofIdx + j] != eofBytes[j]) {
                foundEOF = false;
                break;
            }
        }
        if (foundEOF) {
            break;
        }
    }
    if (foundEOF) {
        if (eventLog.isDebugEnabled()) {
            eventLog.debug("Found desired {} EOF bytes at index {}", asciiDebugValue(eofBytes), eofIdx);
        }
        sink.reset();
        sink.write(sinkBuf, 0, eofIdx + eofBytes.length);
        if (eventLog.isDebugEnabled()) {
            eventLog.debug("Buffer message at EOF: {}", asciiDebugValue(sink.toByteArray()));
        }
        return true;
    }
    eventLog.debug("Looking for EOF bytes {}", asciiDebugValue(eofBytes));
    return false;
}

From source file:org.apache.pig.builtin.Utf8StorageConverter.java

private Map<String, Object> consumeMap(PushbackInputStream in, ResourceFieldSchema fieldSchema)
        throws IOException {
    int buf;
    boolean emptyMap = true;

    while ((buf = in.read()) != '[') {
        if (buf == -1) {
            throw new IOException("Unexpect end of map");
        }
    }
    HashMap<String, Object> m = new HashMap<String, Object>();
    ByteArrayOutputStream mOut = new ByteArrayOutputStream(BUFFER_SIZE);
    while (true) {
        // Read key (assume the key cannot contain special characters such as #, (, [, {, }, ], )
        while ((buf = in.read()) != '#') {
            // end of map
            if (emptyMap && buf == ']') {
                return m;
            }
            if (buf == -1) {
                throw new IOException("Unexpect end of map");
            }
            emptyMap = false;
            mOut.write(buf);
        }
        String key = bytesToCharArray(mOut.toByteArray());
        if (key.length() == 0)
            throw new IOException("Map key can not be null");

        // Read value
        mOut.reset();
        Deque<Character> level = new LinkedList<Character>(); // keep track of nested tuple/bag/map. We do not interpret, save them as bytearray
        while (true) {
            buf = in.read();
            if (buf == -1) {
                throw new IOException("Unexpect end of map");
            }
            if (buf == '[' || buf == '{' || buf == '(') {
                level.push((char) buf);
            } else if (buf == ']' && level.isEmpty()) // End of map
                break;
            else if (buf == ']' || buf == '}' || buf == ')') {
                if (level.isEmpty())
                    throw new IOException("Malformed map");

                if (level.peek() == findStartChar((char) buf))
                    level.pop();
            } else if (buf == ',' && level.isEmpty()) { // Current map item complete
                break;
            }
            mOut.write(buf);
        }
        Object value = null;
        if (fieldSchema != null && fieldSchema.getSchema() != null && mOut.size() > 0) {
            value = bytesToObject(mOut.toByteArray(), fieldSchema.getSchema().getFields()[0]);
        } else if (mOut.size() > 0) { // untyped map
            value = new DataByteArray(mOut.toByteArray());
        }
        m.put(key, value);
        mOut.reset();
        if (buf == ']')
            break;
    }
    return m;
}