com.github.jramos.snowplow.RedshiftSinkModelTransformer.java Source code

Introduction

Here is the source code for com.github.jramos.snowplow.RedshiftSinkModelTransformer.java. The class implements the Kinesis connector ITransformer interface, turning Snowplow enriched events read from a Kinesis stream into delimiter-separated rows suitable for the Redshift COPY command.

Source

/**
 * Portions of this project are copyright Australian Broadcasting Corporation, 2014.
 * All other portions are copyright Justin Ramos, 2015.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.jramos.snowplow;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.amazonaws.services.kinesis.connectors.KinesisConnectorConfiguration;
import com.amazonaws.services.kinesis.connectors.interfaces.ITransformer;
import com.amazonaws.services.kinesis.model.Record;

import com.github.jramos.snowplow.SnowplowKinesisHandlerFactory.IKinesisHandler;
import com.github.jramos.snowplow.operators.ISnowplowEventOperator;

/**
 * Transforms Snowplow events into a delimited string suitable for use with the
 * Redshift COPY command against the 0.9.6 Snowplow atomic events schema.
 *
 * See: https://github.com/snowplow/snowplow/blob/0.9.6/4-storage/redshift-storage/sql/atomic-def.sql
 *
 * @author Sam Mason (sam.mason@abc.net.au)
 */
public class RedshiftSinkModelTransformer implements ITransformer<SnowplowEventModel, byte[]> {

    private static final Log LOG = LogFactory.getLog(RedshiftSinkModelTransformer.class);

    private final char delim;
    private final IKinesisHandler handler;
    private final SnowplowEventTransformer transformer;

    private List<String> eventAppLogList;
    private List<ISnowplowEventOperator> operators;

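    /**
     * Builds the transformer from the connector configuration: the Redshift data
     * delimiter, the schema-version handler, the optional list of app ids whose
     * events should be logged, and any configured event operators.
     */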
    public RedshiftSinkModelTransformer(KinesisConnectorConfiguration config) {
        delim = config.REDSHIFT_DATA_DELIMITER;
        transformer = new SnowplowEventTransformer();
        if (config instanceof RedshiftSinkConfiguration) {
            RedshiftSinkConfiguration redshiftSinkConfiguration = (RedshiftSinkConfiguration) config;
            handler = SnowplowKinesisHandlerFactory
                    .getHandler(redshiftSinkConfiguration.getSnowplowStreamSchemaVersion());

            // get a ref list of app ids for event logging
            if (redshiftSinkConfiguration.hasEventApplicationLogs()) {
                eventAppLogList = redshiftSinkConfiguration.getEventApplicationLogs();
            }

            // instantiate any operators
            if (redshiftSinkConfiguration.hasOperators()) {
                operators = new ArrayList<ISnowplowEventOperator>();
                for (String eventOperatorClassName : redshiftSinkConfiguration.getOperators()) {
                    try {
                        Class<?> operatorClass = Class.forName(eventOperatorClassName);
                        operators.add((ISnowplowEventOperator) operatorClass.newInstance());
                    } catch (Exception e) {
                        LOG.error("Unable to instantiate operator " + eventOperatorClassName, e);
                    }
                }
            }
        } else {
            handler = SnowplowKinesisHandlerFactory.getHandler(SnowplowKinesisHandlerFactory.CURRENT_VERSION);
        }
    }

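    /**
     * Parses a raw Kinesis record (a UTF-8 encoded Snowplow enriched event row)
     * into a SnowplowEventModel via the schema handler, then applies any
     * configured operators to the resulting event.
     */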
    @Override
    public SnowplowEventModel toClass(Record record) throws IOException {
        SnowplowEventModel event = null;
        String dataRow = null;
        try {
            // read only the buffer's remaining bytes; array() exposes the whole backing array
            byte[] raw = new byte[record.getData().remaining()];
            record.getData().duplicate().get(raw);
            dataRow = new String(raw, "UTF-8");
            event = handler.process(dataRow);

            if (eventAppLogList != null && eventAppLogList.contains(event.getApp_id())) {
                LOG.info(dataRow);
            }

            if (operators != null) {
                for (ISnowplowEventOperator operator : operators) {
                    event = operator.apply(event);
                }
            }
        } catch (IOException ioe) {
            LOG.error("toClass. Processing dataRow " + dataRow, ioe);
            throw ioe;
        }
        return event;
    }

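    /**
     * Serialises the event into a UTF-8, delimiter-separated row suitable for the
     * Redshift COPY command, logging the row for any configured app ids.
     */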
    @Override
    public byte[] fromClass(SnowplowEventModel event) throws IOException {
        byte[] bytes = null;
        String redshiftRecord = null;
        try {
            redshiftRecord = transformer.toRedshiftDelimitedString(event, delim);
            if (eventAppLogList != null && eventAppLogList.contains(event.getApp_id())) {
                LOG.info(redshiftRecord);
            }
            bytes = redshiftRecord.getBytes("UTF-8");
        } catch (IOException ioe) {
            LOG.error("fromClass. Redshift string " + redshiftRecord, ioe);
            throw ioe;
        }
        return bytes;
    }
}
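
Usage notes

The constructor shows the class's main extension point: the sink configuration can list fully qualified class names of ISnowplowEventOperator implementations, which are instantiated reflectively (so each needs a public no-argument constructor) and applied to every event in toClass. The following sketch of such an operator is hypothetical; it assumes only what the listing above shows, namely an apply method that takes and returns a SnowplowEventModel, and the class name and app id are made up for illustration.

package com.github.jramos.snowplow.operators;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.github.jramos.snowplow.SnowplowEventModel;

/**
 * Hypothetical operator: logs events for one app id and passes every event
 * through unchanged. The apply(...) signature is inferred from toClass above.
 */
public class AppIdLoggingOperator implements ISnowplowEventOperator {

    private static final Log LOG = LogFactory.getLog(AppIdLoggingOperator.class);

    // illustrative app id, not one defined by this project
    private static final String WATCHED_APP_ID = "my-app";

    @Override
    public SnowplowEventModel apply(SnowplowEventModel event) {
        if (WATCHED_APP_ID.equals(event.getApp_id())) {
            LOG.info("Operator saw event for app id " + event.getApp_id());
        }
        return event;
    }
}

Such an operator would be enabled by listing its class name in the sink configuration's operators setting; the exact property key is defined in RedshiftSinkConfiguration, which is not part of this listing.

For context, the transformer reaches the connector framework through an IKinesisConnectorPipeline implementation. The sketch below is not this project's actual pipeline class; it assumes the stock BasicMemoryBuffer, AllPassFilter and S3Emitter classes from the amazon-kinesis-connectors library, with S3Emitter standing in for whatever byte[] emitter the project really uses.

package com.github.jramos.snowplow;

import com.amazonaws.services.kinesis.connectors.KinesisConnectorConfiguration;
import com.amazonaws.services.kinesis.connectors.impl.AllPassFilter;
import com.amazonaws.services.kinesis.connectors.impl.BasicMemoryBuffer;
import com.amazonaws.services.kinesis.connectors.interfaces.IBuffer;
import com.amazonaws.services.kinesis.connectors.interfaces.IEmitter;
import com.amazonaws.services.kinesis.connectors.interfaces.IFilter;
import com.amazonaws.services.kinesis.connectors.interfaces.IKinesisConnectorPipeline;
import com.amazonaws.services.kinesis.connectors.interfaces.ITransformer;
import com.amazonaws.services.kinesis.connectors.s3.S3Emitter;

/**
 * Hypothetical pipeline wiring: hands RedshiftSinkModelTransformer to the
 * connector framework alongside stock buffer, filter and emitter implementations.
 */
public class ExampleRedshiftSinkPipeline
        implements IKinesisConnectorPipeline<SnowplowEventModel, byte[]> {

    @Override
    public ITransformer<SnowplowEventModel, byte[]> getTransformer(KinesisConnectorConfiguration config) {
        return new RedshiftSinkModelTransformer(config);
    }

    @Override
    public IEmitter<byte[]> getEmitter(KinesisConnectorConfiguration config) {
        return new S3Emitter(config); // stand-in for the project's real emitter
    }

    @Override
    public IBuffer<SnowplowEventModel> getBuffer(KinesisConnectorConfiguration config) {
        return new BasicMemoryBuffer<SnowplowEventModel>(config);
    }

    @Override
    public IFilter<SnowplowEventModel> getFilter(KinesisConnectorConfiguration config) {
        return new AllPassFilter<SnowplowEventModel>();
    }
}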