acromusashi.stream.bolt.hdfs.HdfsStoreBolt.java Source code

Introduction

Here is the source code for acromusashi.stream.bolt.hdfs.HdfsStoreBolt.java, a Storm bolt that appends received StreamMessage instances to files on HDFS.

Source

/**
* Copyright (c) Acroquest Technology Co., Ltd. All Rights Reserved.
* Please read the associated COPYRIGHTS file for more details.
*
* THE SOFTWARE IS PROVIDED BY Acroquest Technology Co., Ltd.,
* WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
* BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING
* OR DISTRIBUTING THIS SOFTWARE OR ITS DERIVATIVES.
*/
package acromusashi.stream.bolt.hdfs;

import java.io.IOException;
import java.text.MessageFormat;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import acromusashi.stream.bolt.AmConfigurationBolt;
import acromusashi.stream.entity.StreamMessage;
import acromusashi.stream.exception.InitFailException;

/**
 * Bolt that stores received messages into HDFS.<br>
 *
 * @author kimura
 */
public class HdfsStoreBolt extends AmConfigurationBolt {
    /** serialVersionUID */
    private static final long serialVersionUID = -2877852415844943739L;

    /** logger */
    private static final Logger logger = LoggerFactory.getLogger(HdfsStoreBolt.class);

    /** HDFS output switcher used as the write delegate */
    private transient HdfsOutputSwitcher delegate = null;

    /**
     * Constructor.
     */
    public HdfsStoreBolt() {
    }

    /**
     * {@inheritDoc}
     */
    @SuppressWarnings("rawtypes")
    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        super.prepare(stormConf, context, collector);

        String componentId = context.getThisComponentId();
        int taskId = context.getThisTaskId();

        HdfsStoreConfig config = new HdfsStoreConfig();

        config.setOutputUri((String) stormConf.get("hdfsstorebolt.outputuri"));
        config.setFileNameHeader((String) stormConf.get("hdfsstorebolt.filenameheader"));
        config.setFileSwitchIntarval(((Long) stormConf.get("hdfsstorebolt.interval")).intValue());
        config.setFileNameBody("_" + componentId + "_" + taskId + "_");

        boolean isPreprocess = true;
        Object isPreprocessObj = stormConf.get("hdfsstorebolt.executepreprocess");
        if (isPreprocessObj instanceof Boolean) {
            isPreprocess = ((Boolean) isPreprocessObj).booleanValue();
        }

        try {
            // Obtain the FileSystem for the configured output URI
            Configuration conf = new Configuration();
            Path dstPath = new Path(config.getOutputUri());
            FileSystem fileSystem = dstPath.getFileSystem(conf);

            // Pre-process: clean up temporary files left on HDFS by previous runs
            if (isPreprocess) {
                HdfsPreProcessor.execute(fileSystem, config.getOutputUri(),
                        config.getFileNameHeader() + config.getFileNameBody(), config.getTmpFileSuffix());
            }

            this.delegate = new HdfsOutputSwitcher();
            this.delegate.initialize(fileSystem, config, System.currentTimeMillis());
        } catch (Exception ex) {
            logger.warn("Failed to HDFS write initialize.", ex);
            throw new InitFailException(ex);
        }
    }

    @Override
    public void onMessage(StreamMessage message) {
        try {
            this.delegate.appendLine(message.toString(), System.currentTimeMillis());
        } catch (IOException ex) {
            String logFormat = "Failed to write to HDFS. Discarding received message. : Message={0}";
            logger.warn(MessageFormat.format(logFormat, message), ex);
        }

        ack();
    }

    @Override
    public void cleanup() {
        // Note: Storm invokes cleanup only when the topology runs on a LocalCluster.
        logger.info("HdfsStoreBolt Cleanup Start.");

        try {
            this.delegate.close();
        } catch (IOException ex) {
            logger.warn("Failed to HDFS write close. Skip close.", ex);
        }

        logger.info("HDFSSinkBolt Cleanup finished.");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // This bolt has no downstream components, so it declares no output fields.
    }
}
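
Example usage

The following is a minimal, hypothetical sketch of how HdfsStoreBolt might be wired into a topology. The configuration keys match those read in prepare() above; MessageSpout is a placeholder name for whatever spout emits StreamMessage tuples, and the URI, parallelism, and run duration are illustrative assumptions, not values taken from the project.

import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.topology.TopologyBuilder;

import acromusashi.stream.bolt.hdfs.HdfsStoreBolt;

public class HdfsStoreTopologyExample {
    public static void main(String[] args) throws Exception {
        Config conf = new Config();
        // Keys read by HdfsStoreBolt#prepare (see source above)
        conf.put("hdfsstorebolt.outputuri", "hdfs://namenode:8020/storm/output/"); // assumed URI
        conf.put("hdfsstorebolt.filenameheader", "Message");
        conf.put("hdfsstorebolt.interval", 10L);           // cast to Long in prepare, so pass a Long
        conf.put("hdfsstorebolt.executepreprocess", true); // clean up leftover tmp files first

        TopologyBuilder builder = new TopologyBuilder();
        // "MessageSpout" is a hypothetical spout that emits StreamMessage tuples
        builder.setSpout("MessageSpout", new MessageSpout(), 1);
        builder.setBolt("HdfsStoreBolt", new HdfsStoreBolt(), 2)
               .shuffleGrouping("MessageSpout");

        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("hdfs-store-example", conf, builder.createTopology());
        Thread.sleep(60_000); // let the topology run for a minute
        cluster.killTopology("hdfs-store-example");
        cluster.shutdown();
    }
}

Note that onMessage() acks the message even when the HDFS write fails, so delivery to HDFS is at-most-once: a message that cannot be written is logged and discarded rather than replayed.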