Example usage for com.amazonaws.services.kinesis.model.Record.getData()

List of usage examples for com.amazonaws.services.kinesis.model.Record.getData()

Introduction

This page collects example usages of com.amazonaws.services.kinesis.model.Record.getData() from open-source projects.

Prototype


public java.nio.ByteBuffer getData() 


Document

The data blob.
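As a reference, here is a minimal sketch of reading the payload returned by getData(). It assumes only the AWS SDK Record class named in the prototype above; the class and helper names (RecordDataReader, toBytes) are illustrative and do not come from any of the sources below. It copies the remaining bytes rather than calling array(), since the backing array of the returned ByteBuffer is not guaranteed to be accessible.

import java.nio.ByteBuffer;

import com.amazonaws.services.kinesis.model.Record;

public final class RecordDataReader {

    /** Copies the record payload into a byte array without assuming the buffer is array-backed. */
    public static byte[] toBytes(Record record) {
        // Work on a duplicate so the shared buffer's position is not advanced.
        ByteBuffer data = record.getData().duplicate();
        byte[] bytes = new byte[data.remaining()];
        data.get(bytes);
        return bytes;
    }
}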

Usage

From source file: lumbermill.aws.kcl.internal.RecordProcessor.java

License: Apache License

private int calculateSize(List<Record> records) {
    int bytes = 0;

    for (Record r : records) {
        bytes += r.getData().remaining();
    }

    // We get it in binary, but it's actually sent as Base64
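    // (Base64 expands data by roughly 4/3; the 3/2 factor adds some headroom to that estimate.)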
    return bytes * 3 / 2;
}

From source file: org.apache.samza.system.kinesis.consumer.KinesisSystemConsumer.java

License: Apache License

private IncomingMessageEnvelope translate(SystemStreamPartition ssp, Record record) {
    String shardId = processors.get(ssp).getShardId();
    byte[] payload = new byte[record.getData().remaining()];

    metrics.updateMetrics(ssp.getStream(), record);
    record.getData().get(payload);
    KinesisSystemConsumerOffset offset = new KinesisSystemConsumerOffset(shardId, record.getSequenceNumber());
    return new KinesisIncomingMessageEnvelope(ssp, offset.toString(), record.getPartitionKey(), payload,
            shardId, record.getSequenceNumber(), record.getApproximateArrivalTimestamp());
}

From source file: org.apache.samza.system.kinesis.metrics.KinesisSystemConsumerMetrics.java

License: Apache License

public void updateMetrics(String stream, Record record) {
    eventReadRates.get(stream).inc();
    aggEventReadRate.inc();

    long recordSize = record.getData().array().length + record.getPartitionKey().length();
    eventByteReadRates.get(stream).inc(recordSize);
    aggEventByteReadRate.inc(recordSize);

    long latencyMs = Duration.between(Instant.now(), record.getApproximateArrivalTimestamp().toInstant())
            .toMillis();
    readLatencies.get(stream).update(latencyMs);
    aggReadLatency.update(latencyMs);
}

From source file: org.apache.streams.amazon.kinesis.KinesisPersistReaderTask.java

License: Apache License

@Override
public void run() {

    GetShardIteratorRequest shardIteratorRequest = new GetShardIteratorRequest().withStreamName(this.streamName)
            .withShardId(shardId).withShardIteratorType("TRIM_HORIZON");

    GetShardIteratorResult shardIteratorResult = reader.client.getShardIterator(shardIteratorRequest);

    shardIteratorId = shardIteratorResult.getShardIterator();

    Map<String, Object> metadata = new HashMap<>();
    metadata.put("streamName", streamName);
    metadata.put("shardId", shardId);

    while (true) {

        GetRecordsRequest recordsRequest = new GetRecordsRequest().withShardIterator(shardIteratorId);

        GetRecordsResult recordsResult = reader.client.getRecords(recordsRequest);

        LOGGER.info("{} records {} millis behind {}:{}:{} ", recordsResult.getRecords().size(),
                recordsResult.getMillisBehindLatest(), streamName, shardId, shardIteratorId);

        shardIteratorId = recordsResult.getNextShardIterator();

        List<Record> recordList = recordsResult.getRecords();

        for (Record record : recordList) {
            try {
                byte[] byteArray = record.getData().array();
                //byte[] decoded = Base64.decode(byteArray);
                String message = new String(byteArray, Charset.forName("UTF-8"));
                reader.persistQueue.add(new StreamsDatum(message, record.getPartitionKey(), new DateTime(),
                        new BigInteger(record.getSequenceNumber()), metadata));
            } catch (Exception ex) {
                LOGGER.warn("Exception processing record {}: {}", record, ex);
            }
        }
        try {
            Thread.sleep(reader.pollInterval);
        } catch (InterruptedException ex) {
            LOGGER.trace("InterruptedException", ex);
        }
    }

}

From source file: org.selman.tweetamo.TweetamoRecordProcessor.java

License: Apache License

/** Process records performing retries as needed. Skip "poison pill" records.
 * @param records
 */
private void processRecordsWithRetries(List<Record> records) {
    for (Record record : records) {
        boolean processedSuccessfully = false;
        for (int i = 0; i < NUM_RETRIES; i++) {
            try {
                Status status = TweetSerializer.fromBytes(record.getData());

                if (persistentStore != null) {
                    persistentStore.add(status);
                }

                processedSuccessfully = true;
                break;
            } catch (Throwable t) {
                LOG.warn("Caught throwable while processing record " + record, t);
            }

            // backoff if we encounter an exception.
            try {
                Thread.sleep(BACKOFF_TIME_IN_MILLIS);
            } catch (InterruptedException e) {
                LOG.debug("Interrupted sleep", e);
            }
        }

        if (!processedSuccessfully) {
            LOG.error("Couldn't process record " + record + ". Skipping the record.");
        }
    }
}

From source file: org.swiftshire.nifi.processors.kinesis.consumer.GetKinesisStream.java

License: Apache License

/**
 * {@inheritDoc}
 */
@Override
public void processRecords(ProcessRecordsInput processRecordsInput, InitializationInput initializationInput) {
    Record lastRecordProcessed = null;
    int processedRecords = 0;
    long timestamp = System.currentTimeMillis();

    FlowFile flowFile = null;
    ProcessSession session = getSessionFactory().createSession();

    try {
        for (Record record : processRecordsInput.getRecords()) {
            try {
                flowFile = session.create();
                StopWatch stopWatch = new StopWatch(true);
                ByteArrayInputStream baos = new ByteArrayInputStream(record.getData().array());
                flowFile = session.importFrom(baos, flowFile);

                Map<String, String> attributes = createAttributes(processRecordsInput, processedRecords,
                        timestamp, record);

                flowFile = session.putAllAttributes(flowFile, attributes);

                session.transfer(flowFile, REL_SUCCESS);

                session.getProvenanceReporter().receive(flowFile,
                        "kinesis://" + streamName + "/" + timestamp + "/" + ++processedRecords,
                        +stopWatch.getElapsed(TimeUnit.MILLISECONDS));

                lastRecordProcessed = record;
            } catch (Exception ex) {
                if (flowFile != null) {
                    session.remove(flowFile);
                }

                getLogger().error(
                        "Error while handling record: " + record + " with exception: " + ex.getMessage());
            }
        }
    } finally {
        try {
            if (lastRecordProcessed != null) {
                processRecordsInput.getCheckpointer().checkpoint(lastRecordProcessed);
            } else {
                processRecordsInput.getCheckpointer().checkpoint();
            }
        } catch (KinesisClientLibDependencyException | InvalidStateException | ThrottlingException
                | ShutdownException ex) {
            getLogger().error("Exception while checkpointing record " + ex.getMessage());
        }
    }

    session.commit();
}

From source file: sample.StringToByteArrayTransformerWithNewlines.java

License: Open Source License

@Override
public String toClass(Record record) {
    return new String(record.getData().array()) + "\n";
}

From source file: samples.elasticsearch.BatchedKinesisMessageModelElasticsearchTransformer.java

License: Open Source License

@SuppressWarnings("unchecked")
@Override
public Collection<KinesisMessageModel> toClass(Record record) throws IOException {

    try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(record.getData().array()))) {
        return (Collection<KinesisMessageModel>) ois.readObject();
    } catch (Exception e) {
        String message = "Error reading object from ObjectInputStream: " + new String(record.getData().array());
        LOG.error(message, e);
        throw new IOException(message, e);
    }
}

From source file: samples.elasticsearch.SingleKinesisMessageModelElasticsearchTransformer.java

License: Open Source License

@Override
public KinesisMessageModel toClass(Record record) throws IOException {
    try {
        return new ObjectMapper().readValue(record.getData().array(), KinesisMessageModel.class);
    } catch (IOException e) {
        String message = "Error parsing record from JSON: " + new String(record.getData().array());
        LOG.error(message, e);
        throw new IOException(message, e);
    }
}

From source file: samples.s3.KinesisMessageModelS3Transformer.java

License: Open Source License

@Override
public KinesisMessageModel toClass(Record record) throws IOException {
    try {
        return KinesisMessageModel.newInstance(new String(record.getData().array(), "UTF-8"));
    } catch (Exception e) {
        String message = "Error parsing record from JSON: " + new String(record.getData().array());
        LOG.error(message, e);
        throw new IOException(message, e);
    }
}