org.apache.flume.plugins.KafkaSink.java Source code

Introduction

Here is the source code for org.apache.flume.plugins.KafkaSink.java

Source

/*
 *  Copyright (c) 2013.09.06 BeyondJ2EE.
 *  All rights reserved.
 *  http://beyondj2ee.github.com
 *  This software is the confidential and proprietary information of BeyondJ2EE, Inc.
 *  You shall not disclose such Confidential Information and shall use it only in
 *  accordance with the terms of the license agreement you entered into with BeyondJ2EE.
 *
 *  Revision History
 *  Author              Date                  Description
 *  ===============    ================       ======================================
 *  beyondj2ee
 *
 */

package org.apache.flume.plugins;

/**
 * KAFKA Flume Sink (Kafka 0.8 Beta, Flume 1.4).
 * User: beyondj2ee
 * Date: 13. 9. 4
 * Time: PM 4:32
 *
 * Annotated by zqh @ 2014-05-14:
 * In flume-conf.properties the sink is configured as KafkaSink, so events flow
 * from the source through the channel into this sink. The sink then acts as a
 * Kafka producer and publishes each event to Kafka.
 *
 * We mock tailed data as a Flume source, and we want that data to flow into Kafka.
 * KafkaSink sinks messages to Kafka; that is, it is a Flume sink that writes to
 * Kafka as a producer. (A companion constants class and an example configuration
 * are sketched after the listing.)
 */
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import org.apache.commons.lang.StringUtils;
import org.apache.flume.*;
import org.apache.flume.conf.Configurable;
import org.apache.flume.event.EventHelper;
import org.apache.flume.sink.AbstractSink;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Properties;

public class KafkaSink extends AbstractSink implements Configurable {
    private static final Logger LOGGER = LoggerFactory.getLogger(KafkaSink.class);

    // parameters read from the Flume configuration
    private Properties parameters;
    private Context context;

    // Kafka producer used to publish events
    private Producer<String, String> producer;

    @Override
    public void configure(Context context) {
        this.context = context;
        ImmutableMap<String, String> props = context.getParameters();

        // All of this sink's keys share the prefix producer.sinks.r in flume-conf.properties.
        // The Flume agent is started with:
        //   bin/flume-ng agent --conf conf --conf-file conf/flume-conf.properties --name producer
        // The --name value "producer" is the agent name that the producer.sinks.r prefix refers to.
        parameters = new Properties();
        for (String key : props.keySet()) {
            String value = props.get(key);
            this.parameters.put(key, value);
        }
    }

    // Called when Flume starts this sink: build the Kafka producer from the sink properties.
    @Override
    public synchronized void start() {
        super.start();
        // Build a Kafka ProducerConfig from the producer.sinks.r.* properties in flume-conf.properties
        ProducerConfig config = new ProducerConfig(this.parameters);
        // Create the Kafka producer
        this.producer = new Producer<String, String>(config);
    }

    /**
     * In Flume, the sink takes events that the source has put into the channel.
     * Here the sink forwards each event to Kafka, so data from the source ends
     * up in Kafka. From Kafka's point of view this sink is a producer: it
     * publishes messages for downstream consumers to read. Returning
     * Status.BACKOFF tells the Flume sink runner to wait before retrying.
     */
    @Override
    public Status process() throws EventDeliveryException {
        Status status = null;

        // Start the channel transaction
        Channel ch = getChannel(); // the channel bound via producer.sinks.r.channel
        Transaction txn = ch.getTransaction();
        txn.begin();
        try {
            // This try clause includes whatever Channel operations you want to do.
            // The source puts events into the channel and the sink takes them out;
            // the channel decouples the source from the sink.
            Event event = ch.take();
            // take() returns null when the channel is empty; back off instead of
            // dereferencing a null event below
            if (event == null) {
                txn.commit();
                return Status.BACKOFF;
            }

            String partitionKey = (String) parameters.get(KafkaFlumeConstans.PARTITION_KEY_NAME);
            String encoding = StringUtils.defaultIfEmpty(
                    (String) this.parameters.get(KafkaFlumeConstans.ENCODING_KEY_NAME),
                    KafkaFlumeConstans.DEFAULT_ENCODING);
            // The topic comes from producer.sinks.r.custom.topic.name in flume-conf.properties
            String topic = Preconditions.checkNotNull(
                    (String) this.parameters.get(KafkaFlumeConstans.CUSTOM_TOPIC_KEY_NAME),
                    "custom.topic.name is required");

            String eventData = new String(event.getBody(), encoding);

            KeyedMessage<String, String> data;

            // If no partition key is configured, let Kafka pick the partition;
            // otherwise wrap the payload in a KeyedMessage carrying the key.
            if (StringUtils.isEmpty(partitionKey)) {
                data = new KeyedMessage<String, String>(topic, eventData);
            } else {
                data = new KeyedMessage<String, String>(topic, partitionKey, eventData);
            }

            if (LOGGER.isInfoEnabled()) {
                LOGGER.info(
                        "Send Message to Kafka : [" + eventData + "] -- [" + EventHelper.dumpEvent(event) + "]");
            }
            // Send the message to the configured topic
            producer.send(data);

            // Commit the transaction
            txn.commit();
            status = Status.READY;
        } catch (Throwable t) {
            txn.rollback();
            status = Status.BACKOFF;

            // re-throw all Errors
            if (t instanceof Error) {
                throw (Error) t;
            }
        } finally {
            txn.close();
        }
        return status;
    }

    @Override
    public synchronized void stop() {
        // Guard against stop() being called before start(), and let
        // AbstractSink update its lifecycle state
        if (producer != null) {
            producer.close();
        }
        super.stop();
    }
}
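
Companion class (sketch)

The listing depends on a companion constants class, KafkaFlumeConstans, that is not shown on this page. Below is a minimal sketch of what it might contain. Only the custom.topic.name key is confirmed by the Preconditions message in process(); the other key strings and the default encoding are assumptions.

package org.apache.flume.plugins;

/**
 * Property keys used by KafkaSink. Sketch only: custom.topic.name is confirmed
 * by the error message in KafkaSink.process(); the other values are assumed.
 */
public class KafkaFlumeConstans {
    // Assumed key string for the optional partition key
    public static final String PARTITION_KEY_NAME = "partition.key";
    // Assumed key string for the event body encoding
    public static final String ENCODING_KEY_NAME = "custom.encoding";
    // Confirmed by the "custom.topic.name is required" message
    public static final String CUSTOM_TOPIC_KEY_NAME = "custom.topic.name";
    // Assumed default charset
    public static final String DEFAULT_ENCODING = "UTF-8";
}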
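
Example configuration (sketch)

For context, a flume-conf.properties fragment that would drive this sink might look like the following. The producer.sinks.r prefix matches the agent name used in the configure() comments, and the Kafka 0.8 producer settings (metadata.broker.list, serializer.class, request.required.acks) are standard producer properties that ProducerConfig reads straight from the Properties object. Treat this as a sketch, not a tested configuration; the custom.* key names follow the assumptions in the constants sketch above.

# Agent named "producer" (matches --name producer on the flume-ng command line)
producer.sources = s
producer.channels = c
producer.sinks = r

# Tail a log file as the source (an exec source is one common way to mock tail data)
producer.sources.s.type = exec
producer.sources.s.command = tail -F /var/log/test.log
producer.sources.s.channels = c

producer.channels.c.type = memory
producer.channels.c.capacity = 1000

# The custom Kafka sink from the listing above
producer.sinks.r.type = org.apache.flume.plugins.KafkaSink
producer.sinks.r.channel = c

# Standard Kafka 0.8 producer properties, passed through to ProducerConfig
producer.sinks.r.metadata.broker.list = localhost:9092
producer.sinks.r.serializer.class = kafka.serializer.StringEncoder
producer.sinks.r.request.required.acks = 1

# Keys read through KafkaFlumeConstans (custom.topic.name is required by the sink)
producer.sinks.r.custom.topic.name = test-topic
producer.sinks.r.custom.encoding = UTF-8

With this file in place, the agent is started with the flume-ng command shown in the configure() comments.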