com.bizosys.oneline.maintenance.Import.java Source code

Introduction

Here is the source code for com.bizosys.oneline.maintenance.Import.java, a map-only MapReduce job that reads the SequenceFiles written by the companion Export job and writes the rows back into an HBase table.

Source

/**
 * Copyright 2009 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.bizosys.oneline.maintenance;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

/**
 * Import data written by {@link Export}.
 */
public class Import {
    final static String NAME = "import";

    /**
     * Mapper that reads the exported KeyValues and writes them back into the
     * target table as Puts.
     */
    static class Importer extends TableMapper<ImmutableBytesWritable, Put> {
        /**
         * @param row  The current table row key.
         * @param value  The columns.
         * @param context  The current context.
         * @throws IOException When something is broken with the data.
         * @see org.apache.hadoop.mapreduce.Mapper#map(KEYIN, VALUEIN,
         *   org.apache.hadoop.mapreduce.Mapper.Context)
         */
        @Override
        public void map(ImmutableBytesWritable row, Result value, Context context) throws IOException {
            try {
                context.write(row, resultToPut(row, value));
            } catch (InterruptedException e) {
                // Restore the interrupt flag and fail the task rather than
                // silently swallowing the exception.
                Thread.currentThread().interrupt();
                throw new IOException(e);
            }
        }

        private static Put resultToPut(ImmutableBytesWritable key, Result result) throws IOException {
            Put put = new Put(key.get());
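            // Each KeyValue carries family, qualifier, timestamp and value,
            // so copying the raw cells into the Put preserves the original
            // cell timestamps from the exported table.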
            for (KeyValue kv : result.raw()) {
                put.add(kv);
            }
            return put;
        }
    }

    /**
     * Sets up the actual job.
     *
     * @param conf  The current configuration.
     * @param args  The command line parameters.
     * @return The newly created job.
     * @throws IOException When setting up the job fails.
     */
    public static Job createSubmittableJob(Configuration conf, String[] args) throws IOException {
        String tableName = args[0];
        Path inputDir = new Path(args[1]);
        Job job = new Job(conf, NAME + "_" + tableName);
        job.setJarByClass(Importer.class);
        System.out.println("Input Dir: " + inputDir);
        FileInputFormat.setInputPaths(job, inputDir);
        job.setInputFormatClass(SequenceFileInputFormat.class);
        job.setMapperClass(Importer.class);
        // No reducers.  Just write straight to table.  Call initTableReducerJob
        // because it sets up the TableOutputFormat.
        TableMapReduceUtil.initTableReducerJob(tableName, null, job);
        job.setNumReduceTasks(0);
        return job;
    }

    /**
     * @param errorMsg Error message.  Can be null.
     */
    private static void usage(final String errorMsg) {
        if (errorMsg != null && errorMsg.length() > 0) {
            System.err.println("ERROR: " + errorMsg);
        }
        System.err.println("Usage: Import <tablename> <inputdir>");
    }

    /**
     * Main entry point.
     *
     * @param args  The command line parameters.
     * @throws Exception When running the job fails.
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (otherArgs.length < 2) {
            usage("Wrong number of arguments: " + otherArgs.length);
            System.exit(-1);
        }
        Job job = createSubmittableJob(conf, otherArgs);
        int status = job.waitForCompletion(true) ? 0 : 1;
        String params = (null == args) ? "" : String.join("\t", args);
        if (status == 1) {
            String msg = "Error in job completion. Expected params:\n tablename \t inputdir\n " + params;
            System.out.println(msg);
            throw new Exception(msg);
        }
    }
}
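
Usage

Here is a minimal sketch of driving this job programmatically. The wrapper class RunImport, the table name "mytable", and the input path /export/mytable are hypothetical placeholders, not part of the original source; substitute the real table name and the directory written by the Export job.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.mapreduce.Job;

import com.bizosys.oneline.maintenance.Import;

public class RunImport {
    public static void main(String[] args) throws Exception {
        // Standard HBase configuration, as in Import.main() above.
        Configuration conf = HBaseConfiguration.create();
        // Args are positional: <tablename> <inputdir>, matching usage().
        Job job = Import.createSubmittableJob(conf,
                new String[] { "mytable", "/export/mytable" });
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

Equivalently, the class can be launched from the command line with the arguments shown by usage(): hadoop jar <your-jar> com.bizosys.oneline.maintenance.Import <tablename> <inputdir>.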