Example usage for com.amazonaws.services.kinesis.model.Record.getData()

List of usage examples for com.amazonaws.services.kinesis.model.Record.getData()

Introduction

On this page you can find example usage for com.amazonaws.services.kinesis.model.Record.getData().

Prototype


public java.nio.ByteBuffer getData() 

Document

The data blob.
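
getData() returns the payload as a java.nio.ByteBuffer; how the bytes are interpreted is left to the consumer. Most of the examples below decode them as UTF-8 text. A minimal sketch of that pattern, assuming UTF-8 content (the class name is illustrative):

import java.nio.charset.CharacterCodingException;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.StandardCharsets;

import com.amazonaws.services.kinesis.model.Record;

public class RecordDataExample {

    // Decodes the record's data blob as UTF-8 text. A fresh CharsetDecoder is
    // created per call because CharsetDecoder instances are not thread-safe.
    public static String asUtf8String(Record record) throws CharacterCodingException {
        CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder();
        return decoder.decode(record.getData()).toString();
    }
}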

Usage

From source file: com.peel.kinesisStorm.ParsingBolt.java

License: Open Source License

@Override
public void execute(Tuple input) {
    Record record = (Record) input.getValueByField(DefaultKinesisRecordScheme.FIELD_RECORD);
    ByteBuffer buffer = record.getData();
    String data = null;
    String partitionKey = record.getPartitionKey();
    List<Object> formattedData = null;
    List<Object> tuple = null;
    synchronized (decoder) {
        try {
            data = decoder.decode(buffer).toString();
            formattedData = new JsonFormatter().alternateParsingData(data);
            tuple = new EventRecordScheme().deserialize(partitionKey, formattedData);
        } catch (CharacterCodingException e) {
            LOG.error("Exception when decoding record ", e);
        } catch (JsonParseException e) {
            e.printStackTrace();
        } catch (JsonMappingException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    if (formattedData != null) {
        _collector.emit("processing", tuple);
    }
    _collector.ack(input);
}

From source file: com.sampath.kinesis.CassandraStoreProcessor.java

License: Open Source License

/** Process records performing retries as needed. Skip "poison pill" records.
 * @param records
 */
private void processRecordsWithRetries(List<Record> records) {
    for (Record record : records) {
        boolean processedSuccessfully = false;
        String data = null;
        for (int i = 0; i < NUM_RETRIES; i++) {
            try {
                // For this app, we interpret the payload as UTF-8 chars.
                data = decoder.decode(record.getData()).toString();
                LOG.debug(record.getSequenceNumber() + ", " + record.getPartitionKey() + ", " + data);
                try {
                    Status tweet = DataObjectFactory.createStatus(data);
                    client.insert("tweets", "tweet_id", "" + tweet.getId(), "tweet", data);
                } catch (Exception e) {
                    // Don't let a failed Cassandra insert abort processing of the record.
                    LOG.warn("Failed to store tweet", e);
                }
                //
                // Logic to process record goes here.
                //
                processedSuccessfully = true;
                break;
            } catch (CharacterCodingException e) {
                LOG.error("Malformed data: " + data, e);
                break;
            } catch (Throwable t) {
                LOG.warn("Caught throwable while processing record " + record, t);
            }

            // backoff if we encounter an exception.
            try {
                Thread.sleep(BACKOFF_TIME_IN_MILLIS);
            } catch (InterruptedException e) {
                LOG.debug("Interrupted sleep", e);
            }
        }

        if (!processedSuccessfully) {
            LOG.error("Couldn't process record " + record + ". Skipping the record.");
        }
    }
}

From source file: com.srotya.flume.kinesis.source.KinesisSource.java

License: Apache License

@Override
protected Status doProcess() throws EventDeliveryException {
    Status status = Status.READY;
    GetRecordsRequest recordRequest = new GetRecordsRequest();
    recordRequest.setShardIterator(shardIterator);
    recordRequest.setLimit(putSize);
    GetRecordsResult records = client.getRecords(recordRequest);
    for (Record record : records.getRecords()) {
        try {
            getChannelProcessor().processEvent(serializer.deserialize(record.getData()));
        } catch (Exception e) {
            logger.error("Failed to deserialize event:" + new String(record.getData().array()), e);
        }
    }
    shardIterator = records.getNextShardIterator();
    if (shardIterator == null) {
        getShardIterator();
    }
    return status;
}

From source file: com.tango.flume.kinesis.source.serializer.PlainDeSerializer.java

License: Apache License

@Override
public Event parseEvent(Record record) throws KinesisSerializerException, CharacterCodingException {
    return EventBuilder.withBody(decoder.decode(record.getData()).toString().getBytes());
}

From source file: dbtucker.connect.kinesis.KinesisSourceTask.java

License: Apache License

private SourceRecord toSourceRecord(Map<String, String> sourcePartition, String topic, Record kinesisRecord) {
    // TODO propagate timestamp via
    // `kinesisRecord.getApproximateCreationDateTime.getTime`
    // when that's exposed by Connect
    byte[] xferData = kinesisRecord.getData().array();

    log.debug("Raw kinesis record key {}", kinesisRecord.getPartitionKey());
    log.debug("Raw kinesis data {}", kinesisRecord.getData());
    log.debug("Extracted kinesis data {}", new String(xferData, StandardCharsets.UTF_8));

    return new SourceRecord(sourcePartition,
            Collections.singletonMap(Keys.SEQNUM, kinesisRecord.getSequenceNumber()), topic,
            RecordMapper.keySchema(), RecordMapper.packKey(kinesisRecord.getPartitionKey()),
            RecordMapper.dataSchema(), RecordMapper.packData(kinesisRecord.getData()));
}
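
Several of the examples here call getData().array() to reach the raw bytes. That works when the returned ByteBuffer is backed by an accessible array whose content starts at offset 0, which is typically the case for records returned by the SDK; a defensive copy avoids relying on that detail. A minimal sketch, with an illustrative helper class name:

import java.nio.ByteBuffer;

public final class ByteBuffers {

    // Copies the readable bytes without assuming a backing array or a zero array offset;
    // the duplicate view leaves the original buffer's position untouched.
    public static byte[] toByteArray(ByteBuffer buffer) {
        ByteBuffer view = buffer.duplicate();
        byte[] bytes = new byte[view.remaining()];
        view.get(bytes);
        return bytes;
    }
}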

From source file: edu.hawaii.kscmfeedprocessor.AmazonKinesisApplicationSampleRecordProcessor.java

License: Open Source License

/**
 * Process a single record.
 *
 * @param record The record to be processed.
 */
private void processSingleRecord(Record record) throws Exception {
    // TODO Add your own record processing logic here

    String data = null;
    try {
        // For this app, we interpret the payload as UTF-8 chars.
        data = decoder.decode(record.getData()).toString();
        kscmRecordProcessor.processRecord(record.getSequenceNumber(), record.getPartitionKey(), data);
    } catch (CharacterCodingException e) {
        logger.error("Malformed data: " + data, e);
    }
}

From source file: gov.pnnl.cloud.KinesisRecordProcessor.java

License: Open Source License

/** Process records performing retries as needed. Skip "poison pill" records.
 * @param records
 */
private void processRecordsWithRetries(List<Record> records) {

    // list of messages to put into Kafka
    List<KeyedMessage<String, String>> list = new ArrayList<KeyedMessage<String, String>>();

    // iterate through the Kinesis records, and make a Kafka record for each

    for (Record record : records) {
        stats.increment(Key.KINESIS_MESSAGE_READ);
        String data = null;
        byte[] recordBytes = record.getData().array();

        Coordinate c = null;

        try {
            // For this app, we interpret the payload as UTF-8 chars.

            // use the ObjectMapper to read the json string and create a tree
            JsonNode node = mapper.readTree(recordBytes);

            JsonNode geo = node.findValue("geo");
            JsonNode coords = geo.findValue("coordinates");

            Iterator<JsonNode> elements = coords.elements();

            double lat = elements.next().asDouble();
            double lng = elements.next().asDouble();

            c = new Coordinate(lat, lng);

        } catch (Exception e) {
            // if we get here, it's bad data; ignore and move on to the next record
            stats.increment(Key.JSON_PARSE_ERROR);
        }

        String topic = "nocoords";
        if (c != null) {
            topic = "coords";
        }
        KeyedMessage<String, String> message = new KeyedMessage<String, String>(topic,
                new String(recordBytes));
        list.add(message);

    }

    boolean processedSuccessfully = false;

    for (int i = 0; i < NUM_RETRIES; i++) {
        try {
            producer.send(list);
            stats.increment(Key.KAFKA_MESSAGE_PUT);

            processedSuccessfully = true;
            break;
        } catch (Throwable t) {
            LOG.warn("Caught throwable while processing batch of " + list.size() + " records", t);
        }

        // backoff if we encounter an exception.
        try {
            Thread.sleep(BACKOFF_TIME_IN_MILLIS);
        } catch (InterruptedException e) {
            LOG.debug("Interrupted sleep", e);
        }
    }

    if (!processedSuccessfully) {
        LOG.error("Couldn't process batch of " + list.size() + "records.  What to do now?");
        stats.increment(Key.KAFKA_WRITE_ERROR);

    }
}

From source file: kinesisadaptersample.StreamsRecordProcessor.java

License: Open Source License

@Override
public void processRecords(List<Record> records, IRecordProcessorCheckpointer checkpointer) {
    for (Record record : records) {
        String data = new String(record.getData().array(), Charset.forName("UTF-8"));
        System.out.println(data);
        if (record instanceof RecordAdapter) {
            com.amazonaws.services.dynamodbv2.model.Record streamRecord = ((RecordAdapter) record)
                    .getInternalObject();

            switch (streamRecord.getEventName()) {
            case "INSERT":
            case "MODIFY":
                StreamsAdapterDemoHelper.putItem(dynamoDBClient, tableName,
                        streamRecord.getDynamodb().getNewImage());
                break;
            case "REMOVE":
                StreamsAdapterDemoHelper.deleteItem(dynamoDBClient, tableName,
                        streamRecord.getDynamodb().getKeys().get("Id").getN());
            }
        }
        checkpointCounter += 1;
        if (checkpointCounter % 10 == 0) {
            try {
                checkpointer.checkpoint();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

}

From source file: KinesisStormClickstreamApp.ParseReferrerBolt.java

License: Open Source License

@Override
public void execute(Tuple input, BasicOutputCollector collector) {
    Record record = (Record) input.getValueByField(DefaultKinesisRecordScheme.FIELD_RECORD);
    ByteBuffer buffer = record.getData();
    String data = null;
    try {
        data = decoder.decode(buffer).toString();
        JSONObject jsonObject = new JSONObject(data);

        String referrer = jsonObject.getString("referrer");

        int firstIndex = referrer.indexOf('.');
        int nextIndex = referrer.indexOf('.', firstIndex + 1);
        collector.emit(new Values(referrer.substring(firstIndex + 1, nextIndex)));

    } catch (CharacterCodingException | JSONException | IllegalStateException e) {
        LOG.error("Exception when decoding record ", e);
    }
}

From source file: lumbermill.aws.kcl.internal.RecordProcessor.java

License: Apache License

/**
 * {@inheritDoc}
 */
@Override
public void processRecords(ProcessRecordsInput processRecordsInput) {
    try {
        List<Record> records = processRecordsInput.getRecords();
        Thread.currentThread().setName(kinesisShardId);
        int bytes = calculateSize(records);

        LOG.debug("Got {} records ({} bytes) and is behind latest with {}", records.size(), bytes,
                printTextBehindLatest(processRecordsInput));

        metricsCallback.shardBehindMs(kinesisShardId, processRecordsInput.getMillisBehindLatest());

        Observable observable = Observable.create(subscriber -> {
            try {
                for (Record record : records) {
                    subscriber.onNext(
                            Codecs.BYTES.from(record.getData().array()).put("_shardId", kinesisShardId));
                }
                subscriber.onCompleted();
                metricsCallback.recordsProcessed(kinesisShardId, records.size());
                metricsCallback.bytesProcessed(kinesisShardId, bytes);
            } catch (RuntimeException e) {
                subscriber.onError(e);
            }
        });

        unitOfWorkListener.apply(observable).toBlocking().subscribe();
        transaction.checkpoint(processRecordsInput.getCheckpointer());
    } catch (RuntimeException t) {
        doOnError(t);
    }
}