com.inmobi.databus.readers.TestHadoopStreamReader.java Source code

Introduction

Below is the source code for com.inmobi.databus.readers.TestHadoopStreamReader.java, a TestNG test class from InMobi's messaging-client-databus project. It extends TestAbstractDatabusWaitingReader and exercises the Hadoop stream reader against a stream directory created on the local file system, covering initialization, reading from the start, reading from a checkpoint, and reading from a timestamp.

Source

package com.inmobi.databus.readers;

/*
 * #%L
 * messaging-client-databus
 * %%
 * Copyright (C) 2012 - 2014 InMobi
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * 
 *      http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.io.IOException;
import java.util.TreeMap;
import java.util.TreeSet;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;

import com.inmobi.databus.partition.PartitionCheckpoint;
import com.inmobi.databus.partition.PartitionCheckpointList;
import com.inmobi.messaging.consumer.util.HadoopUtil;
import com.inmobi.messaging.consumer.util.TestUtil;

public class TestHadoopStreamReader extends TestAbstractDatabusWaitingReader {
    static final Log LOG = LogFactory.getLog(TestHadoopStreamReader.class);

    @BeforeTest
    public void setup() throws Exception {
        consumerNumber = 1;
        // use every other test file created by the Hadoop test utility
        files = new String[] { HadoopUtil.files[1], HadoopUtil.files[3], HadoopUtil.files[5] };
        conf = new Configuration();
        fs = FileSystem.getLocal(conf);
        // fully qualified stream directory under the configured test root
        streamDir = new Path(new Path(TestUtil.getConfiguredRootDir(), this.getClass().getSimpleName()), testStream)
                .makeQualified(fs);
        // lay out the test files under streamDir on the local file system
        HadoopUtil.setupHadoopCluster(conf, files, null, finalFiles, streamDir, false);
        inputFormatClass = SequenceFileInputFormat.class.getCanonicalName();
        encoded = false;
        // include all 60 minutes in the partition minute list
        partitionMinList = new TreeSet<Integer>();
        for (int i = 0; i < 60; i++) {
            partitionMinList.add(i);
        }
        // start with an empty set of partition checkpoints
        chkPoints = new TreeMap<Integer, PartitionCheckpoint>();
        partitionCheckpointList = new PartitionCheckpointList(chkPoints);
    }

    @AfterTest
    public void cleanup() throws IOException {
        LOG.debug("Cleaning up the dir: " + streamDir.getParent());
        fs.delete(streamDir.getParent(), true);
    }

    // each test scenario below delegates to the shared implementation
    // in TestAbstractDatabusWaitingReader
    @Test
    public void testInitialize() throws Exception {
        super.testInitialize();
    }

    @Test
    public void testReadFromStart() throws Exception {
        super.testReadFromStart();
    }

    @Test
    public void testReadFromCheckpoint() throws Exception {
        super.testReadFromCheckpoint();
    }

    @Test
    public void testReadFromTimeStamp() throws Exception {
        super.testReadFromTimeStamp();
    }

    @Override
    Path getStreamsDir() {
        // hands the stream directory to the base test class
        return streamDir;
    }

}
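
Running the test

For completeness, here is a minimal sketch of driving this class programmatically through the TestNG API rather than a build tool. The TestNG calls used (setTestClasses, run, hasFailure) are standard TestNG API; the driver class name RunTestHadoopStreamReader is hypothetical, and the sketch assumes the test class and its Hadoop and messaging-client-databus dependencies are already on the classpath.

package com.inmobi.databus.readers;

import org.testng.TestNG;

public class RunTestHadoopStreamReader {
    public static void main(String[] args) {
        // hypothetical driver: build a programmatic TestNG instance;
        // @BeforeTest/@AfterTest on TestHadoopStreamReader fire as usual
        TestNG testng = new TestNG();
        testng.setTestClasses(new Class[] { TestHadoopStreamReader.class });
        testng.run();
        // exit non-zero if any of the four read tests failed
        System.exit(testng.hasFailure() ? 1 : 0);
    }
}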