Example usage for org.apache.hadoop.fs FileSystem get

Introduction

This page collects usage examples for org.apache.hadoop.fs.FileSystem.get(URI, Configuration).

Prototype

public static FileSystem get(URI uri, Configuration conf) throws IOException 

Document

Get a FileSystem for this URI's scheme and authority.
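
Before the collected examples, here is a minimal, self-contained sketch of the call; the namenode address and the /user/demo path are placeholders, not values from any example below.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FileSystemGetSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Resolves the FileSystem implementation registered for the URI's
        // scheme and authority (here HDFS); a URI without a scheme falls
        // back to the default filesystem configured in fs.defaultFS.
        FileSystem fs = FileSystem.get(URI.create("hdfs://namenode:8020/"), conf);
        System.out.println("exists: " + fs.exists(new Path("/user/demo")));
    }
}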

Usage

From source file:cn.lhfei.hadoop.ch04.SequenceFileWriteDemo.java

License:Apache License

public static void main(String[] args) {

    String uri = args[0];
    Configuration conf = new Configuration();
    FileSystem fs = null;
    SequenceFile.Writer writer = null;

    try {
        fs = FileSystem.get(URI.create(uri), conf);
        Path path = new Path(uri);

        IntWritable key = new IntWritable();
        Text value = new Text();

        //writer = SequenceFile.createWriter(fs, conf, path, key.getClass(), value.getClass());

        // option-based API: the destination file, key class and value class
        // are all passed as Writer options
        writer = SequenceFile.createWriter(conf, Writer.file(path), Writer.keyClass(key.getClass()),
                Writer.valueClass(value.getClass()));

        for (int i = 0; i < 100; i++) {
            key.set(100 - i);
            value.set(DATA[i % DATA.length]);
            System.out.printf("[%s]\t%s\t%s\n", writer.getLength(), key, value);
            writer.append(key, value);
        }

    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        IOUtils.closeStream(writer);
    }

}
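
The demo only writes the file; a companion sketch for reading it back with the matching option-based Reader API (assuming the same conf and path variables) might look like this:

    SequenceFile.Reader reader = null;
    try {
        // Reader.file mirrors Writer.file: the option names the file to read.
        reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(path));
        IntWritable key = new IntWritable();
        Text value = new Text();
        while (reader.next(key, value)) {
            System.out.printf("%s\t%s\n", key, value);
        }
    } finally {
        IOUtils.closeStream(reader);
    }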

From source file:co.cask.hydrator.plugin.batch.CopybookRecordReader.java

License:Apache License

@Override
public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
    // Get configuration
    Configuration conf = context.getConfiguration();
    int fileStructure = net.sf.JRecord.Common.Constants.IO_FIXED_LENGTH;
    Path path = new Path(conf.get(CopybookInputFormat.COPYBOOK_INPUTFORMAT_DATA_HDFS_PATH));
    FileSystem fs = FileSystem.get(path.toUri(), conf);
    // Create input stream for the COBOL copybook contents
    InputStream inputStream = IOUtils
            .toInputStream(conf.get(CopybookInputFormat.COPYBOOK_INPUTFORMAT_CBL_CONTENTS), "UTF-8");
    BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream);
    try {
        externalRecord = CopybookIOUtils.getExternalRecord(bufferedInputStream);
        recordByteLength = CopybookIOUtils.getRecordLength(externalRecord, fileStructure);

        LineProvider lineProvider = LineIOProvider.getInstance().getLineProvider(fileStructure,
                CopybookIOUtils.FONT);
        reader = LineIOProvider.getInstance().getLineReader(fileStructure, lineProvider);
        LayoutDetail copybook = CopybookIOUtils.getLayoutDetail(externalRecord);

        org.apache.hadoop.mapreduce.lib.input.FileSplit fileSplit = (org.apache.hadoop.mapreduce.lib.input.FileSplit) split;

        start = fileSplit.getStart();
        end = start + fileSplit.getLength();

        BufferedInputStream fileIn = new BufferedInputStream(fs.open(fileSplit.getPath()));
        // Jump to the point in the split at which the first complete record of the split starts,
        // if not the first InputSplit
        if (start != 0) {
            position = start - (start % recordByteLength) + recordByteLength;
            fileIn.skip(position);
        }
        reader.open(fileIn, copybook);

    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}

From source file:co.cask.hydrator.plugin.batch.source.ExcelReaderRegexFilter.java

License:Apache License

@Override
public boolean accept(Path path) {
    try {
        fs = FileSystem.get(path.toUri(), conf);
        if (fs.isDirectory(path)) {
            return true;
        }

        boolean patternMatch = true;
        Matcher matcher = pattern.matcher(path.toString());
        patternMatch = matcher.find();
        if (patternMatch && !conf.getBoolean(RE_PROCESS, false)
                && CollectionUtils.isNotEmpty(preProcessedFileList)) {
            patternMatch = !preProcessedFileList.contains(path.toString());
        }

        return patternMatch;
    } catch (IOException e) {
        return false;
    }
}
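
For context, a filter like this only takes effect once it is registered on the job's input format; a minimal sketch (the configured Job instance is assumed, and note that directories must be accepted, as above, or their contents are never listed):

    // Register the PathFilter on a configured Job (assumed to exist).
    FileInputFormat.setInputPathFilter(job, ExcelReaderRegexFilter.class);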

From source file:co.nubetech.hiho.testdata.SequenceFileForCustomObject.java

License:Apache License

public static void main(String[] args) throws IOException {
    String uri = "inputnew.seq";
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(uri), conf);
    Path path = new Path(uri);
    // IntWritable key = new IntWritable();
    // Student value[] = new Student[10];
    SequenceFile.Writer writer = null;
    Student student = new Student();
    try {
        writer = SequenceFile.createWriter(fs, conf, path, IntWritable.class, Student.class);
        for (int i = 0; i < 10; i++) {
            student.setId(id[i]);
            student.setName(name[i]);
            student.setAddress(address[i]);
            student.setMobileNumber(mobileNo[i]);
            student.setPercentage(percentage[i]);
            // value[i]=student;

            // System.out.printf("[%s]\t%s\t%s\n", writer.getLength(), key,
            // value);
            writer.append(student.getId(), student);
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        IOUtils.closeStream(writer);
    }

}
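
The Student class is not shown in this snippet; to be written to a SequenceFile its type must implement org.apache.hadoop.io.Writable. A minimal sketch under that assumption, showing only two of the fields used above:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

// Hypothetical sketch; the real Student class has more fields.
public class Student implements Writable {
    private IntWritable id = new IntWritable();
    private Text name = new Text();

    public IntWritable getId() { return id; }
    public void setId(IntWritable id) { this.id = id; }
    public void setName(Text name) { this.name = name; }

    @Override
    public void write(DataOutput out) throws IOException {
        // serialize fields in a fixed order
        id.write(out);
        name.write(out);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        // deserialize fields in the same order
        id.readFields(in);
        name.readFields(in);
    }
}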

From source file:co.nubetech.hiho.testdata.SequenceFileWriteDemo.java

License:Apache License

public static void main(String[] args) throws IOException {
    String uri = "input2.seq";
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(uri), conf);
    Path path = new Path(uri);
    IntWritable key = new IntWritable();
    Text value = new Text();
    SequenceFile.Writer writer = null;
    try {
        writer = SequenceFile.createWriter(fs, conf, path, key.getClass(), value.getClass());
        for (int i = 0; i < 2; i++) {
            key.set(2 - i);
            value.set(DATA[i % DATA.length]);
            System.out.printf("[%s]\t%s\t%s\n", writer.getLength(), key, value);
            writer.append(key, value);
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        IOUtils.closeStream(writer);
    }
}

From source file:colossal.pipe.ColPipe.java

License:Apache License

public ColPipe(Class<?> jarClass) {
    baseConf.setJarByClass(jarClass);
    baseConf.set("mapred.job.reuse.jvm.num.tasks", "-1");
    try {
        FileSystem fs = FileSystem.get(new URI("/"), baseConf);
        FileSystem localfs = FileSystem.getLocal(baseConf);
        if (fs.equals(localfs)) {
            baseConf.setNumReduceTasks(2); // run only 2 reducers for local
        } else {
            baseConf.setNumReduceTasks(32); // default to 32 reducers - need to tune this
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
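
Whether the equals() comparison above distinguishes local from distributed filesystems depends on the cached instances involved; a more explicit sketch of the same local-mode check (not from the original source) compares the resolved scheme instead:

    // "file" is the scheme of the local filesystem; anything else
    // (hdfs, s3a, ...) is treated as a distributed deployment.
    FileSystem fs = FileSystem.get(new URI("/"), baseConf);
    boolean isLocal = "file".equals(fs.getUri().getScheme());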

From source file:com.ailk.oci.ocnosql.tools.load.mutiple.MutipleColumnImportTsv.java

License:Apache License

public boolean execute(Connection conn, OciTableRef table) {
    if (conn == null) {
        msg = "Connection object must not be null";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ClientRuntimeException(msg);
    }
    Configuration conf = conn.getConf();
    if (table == null) {
        msg = "table must not be null";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ClientRuntimeException(msg);
    }

    String tableName = table.getName();
    String column = table.getColumns();
    String seperator = table.getSeperator();
    String inputPath = table.getInputPath();
    String tmpOutPut = table.getImportTmpOutputPath();
    String skipBadLine = table.getSkipBadLine();
    String compressor = table.getCompressor();
    String rowkeyUnique = table.getRowKeyUnique();
    String algoColumn = table.getAlgoColumn();
    String rowkeyGenerator = table.getRowkeyGenerator();
    String rowkeyColumn = table.getRowkeyColumn();
    String callback = table.getCallback();

    if (StringUtils.isEmpty(tableName)) {
        msg = "No " + CommonConstants.TABLE_NAME
                + " specified. Please check config,then try again after refreshing cache";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ConfigException(msg);
    }
    conf.set(CommonConstants.TABLE_NAME, tableName);

    if (StringUtils.isEmpty(seperator)) {
        msg = "No " + CommonConstants.SEPARATOR
                + " specified. Please check config,then try again after refreshing cache";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ConfigException(msg);
    }
    conf.set(CommonConstants.SEPARATOR, seperator);

    // Make sure columns are specified
    String columns[] = StringUtils.splitByWholeSeparatorPreserveAllTokens(column, ",");
    if (columns == null) {
        msg = "No " + CommonConstants.COLUMNS
                + " specified. Please check config,then try again after refreshing cache";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ConfigException(msg);
    }
    conf.set(CommonConstants.COLUMNS, column);

    //      int rowkeysFound = 0;
    //      for (String col : columns) {
    //         if (col.equals(CommonConstants.ROW_KEY))
    //            rowkeysFound++;
    //      }
    //      if (rowkeysFound != 1) {
    //         msg = "Must specify exactly one column as " + CommonConstants.ROW_KEY + ". Please check config,then again after refreshing cache";
    //         retMap.put(FAILED_REASON, msg);
    //         LOG.error(msg);
    //         throw new ConfigException(msg);
    //      }

    if (columns.length < 2) {
        msg = "One or more columns in addition to the row key are required. Please check config,then try again after refreshing cache";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ConfigException(msg);
    }

    String[] columnTmp = null;
    for (int i = 0; i < columns.length; i++) {
        columnTmp = columns[i].split(":");
        if (columnTmp != null && columnTmp.length == 2) {
            break;
        }
    }
    conf.set(CommonConstants.SINGLE_FAMILY, columnTmp[0]);
    if (!StringUtils.isEmpty(skipBadLine)) {
        conf.set(CommonConstants.SKIPBADLINE, skipBadLine);
    }
    // compressor and row-key generation settings
    conf.set(CommonConstants.COMPRESSOR, (compressor == null) ? DEFAULT_COMPRESSOR : compressor);
    conf.set(CommonConstants.ALGOCOLUMN, algoColumn);
    conf.set(CommonConstants.ROWKEY_GENERATOR, rowkeyGenerator);
    conf.set(CommonConstants.ROWKEYCOLUMN, rowkeyColumn);
    conf.set(CommonConstants.ROWKEYCALLBACK, callback);

    boolean ret = false;
    Counter failCounter = null;
    try {
        hbaseAdmin = new HBaseAdmin(conf);
        TableConfiguration.getInstance().writeTableConfiguration(tableName, column, seperator, conf);
        conf.set(CommonConstants.TABLE_NAME, tableName);
        String hdfs_url = conf.get(CommonConstants.HDFS_URL);
        FileSystem fs = FileSystem.get(URI.create(hdfs_url), conf);
        FileStatus[] fileStatusArr = fs.listStatus(new Path(hdfs_url + inputPath));
        if (fileStatusArr != null && fileStatusArr.length > 0) {
            if (fileStatusArr[0].isFile()) {
                ret = (Boolean) runJob(conf, tableName, inputPath, tmpOutPut)[0];
            }
            int inputPathNum = 0;
            for (FileStatus everyInputPath : fileStatusArr) {
                Path inputPathStr = everyInputPath.getPath();
                String absoluteInputPathStr = inputPath + "/" + inputPathStr.getName();
                boolean retCode = (Boolean) runJob(conf, tableName, absoluteInputPathStr,
                        tmpOutPut + "/" + inputPathStr.getName())[0];
                if (retCode) {
                    String base64Seperator = conf.get(CommonConstants.SEPARATOR);
                    conf.set(CommonConstants.SEPARATOR, new String(Base64.decode(base64Seperator))); // restore the original (non-Base64) separator
                    if (inputPathNum == fileStatusArr.length - 1) {
                        ret = true;
                    }
                    inputPathNum++;
                    continue;
                } else { // job failed: stop processing remaining input paths
                    ret = false;
                    inputPathNum++;
                    break;
                }
            }
        }
    } catch (Exception e) {
        msg = "job execute failed,nested exception is " + e;
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ClientRuntimeException(msg);
    }

    if (!ret) {
        msg = "execute job failed,please check map/reduce log in jobtracker page";
        retMap.put(FAILED_REASON, msg);
        return false;
    }
    return true;
}

From source file:com.ailk.oci.ocnosql.tools.load.mutiple.MutipleColumnImportTsv.java

License:Apache License

/**
 * Main entry point.
 *
 * @param args  The command line parameters.
 * @throws Exception When running the job fails.
 */
public static void main(String[] args) throws Exception {
    long inputLineNum = 0L;
    long badLineNum = 0L;
    long outputLineNum = 0L;
    Configuration conf = HBaseConfiguration.create();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 2) {
        usage("Wrong number of arguments: " + otherArgs.length);
        System.exit(-1);
    }
    // Make sure columns are specified
    String columns = conf.get(CommonConstants.COLUMNS);
    if (columns == null) {
        usage("No columns specified. Please specify with -D" + CommonConstants.COLUMNS + "=...");
        System.exit(-1);
    }
    String seperator = conf.get(CommonConstants.SEPARATOR);
    if (StringUtils.isEmpty(seperator)) {
        conf.set(CommonConstants.SEPARATOR, CommonConstants.DEFAULT_SEPARATOR);
        seperator = CommonConstants.DEFAULT_SEPARATOR;
    }
    // Make sure one or more columns are specified
    if (columns.split(",").length < 2) {
        usage("One or more columns in addition to the row key are required");
        System.exit(-1);
    }
    // make sure tableName and columns are upper-case for use by Phoenix
    columns = columns.toUpperCase();
    String notNeedLoadColumnsStr = conf.get(CommonConstants.NOTNEEDLOADCOLUMNS);
    String notNeedLoadColumns = null;
    if (!StringUtils.isEmpty(notNeedLoadColumnsStr)) {
        notNeedLoadColumns = notNeedLoadColumnsStr.toUpperCase();
        conf.set(CommonConstants.NOTNEEDLOADCOLUMNS, notNeedLoadColumns);
    }

    String writeTableConfigColumns = getWriteConfigColumn(columns, notNeedLoadColumns);
    hbaseAdmin = new HBaseAdmin(conf);
    String tableName = otherArgs[0].toUpperCase();
    String inputPath = otherArgs[1];
    String tmpOutputPath = conf.get(CommonConstants.IMPORT_TMP_OUTPUT);
    conf.set(CommonConstants.TABLE_NAME, tableName);
    conf.set(CommonConstants.COLUMNS, columns);
    String pathStr = conf.get(CommonConstants.HDFS_URL) + inputPath;
    FileSystem fs = FileSystem.get(URI.create(conf.get(CommonConstants.HDFS_URL)), conf);
    FileStatus[] fileStatusArr = fs.listStatus(new Path(pathStr));
    if (fileStatusArr != null && fileStatusArr.length > 0) {
        TableConfiguration.getInstance().writeTableConfiguration(tableName, writeTableConfigColumns, seperator,
                conf);
        if (fileStatusArr[0].isFile()) {
            Object[] resObjs = runJob(conf, tableName, inputPath, tmpOutputPath);
            inputLineNum = (Long) resObjs[1];
            outputLineNum = (Long) resObjs[2];
            badLineNum = (Long) resObjs[3];
            LOG.info("Bulkload Result={inputLine:" + inputLineNum + ",outputLine:" + outputLineNum + ",badLine:"
                    + badLineNum + "}");
            boolean result = (Boolean) resObjs[0];
            if (result) {
                System.exit(0);
            }
            System.exit(-1);
        }
        for (FileStatus everyInputPath : fileStatusArr) {
            Path inputPathStr = everyInputPath.getPath();
            String absoluteInputPathStr = inputPath + "/" + inputPathStr.getName();
            FileStatus[] subFileStatusArr = fs
                    .listStatus(new Path(conf.get(CommonConstants.HDFS_URL) + absoluteInputPathStr));
            if (subFileStatusArr == null || subFileStatusArr.length == 0) // skip empty directories: no job to run
                continue;
            Object[] resObjs = runJob(conf, tableName, absoluteInputPathStr,
                    tmpOutputPath + "/" + inputPathStr.getName());
            boolean ret = (Boolean) resObjs[0];
            if (ret) {
                inputLineNum += (Long) resObjs[1];
                outputLineNum += (Long) resObjs[2];
                badLineNum += (Long) resObjs[3];
                String seperatorStr = conf.get(CommonConstants.SEPARATOR);
                conf.set(CommonConstants.SEPARATOR, new String(Base64.decode(seperatorStr))); // restore the original (non-Base64) separator
                continue;
            } else { // job failed: log counters and exit
                LOG.error("Bulkload Result={inputLine:" + inputLineNum + ",outputLine:" + outputLineNum
                        + ",badLine:" + badLineNum + "}");
                System.exit(-1);
            }
        }
        LOG.info("Bulkload Result={inputLine:" + inputLineNum + ",outputLine:" + outputLineNum + ",badLine:"
                + badLineNum + "}");
    }
    LOG.info("Bulkload Result={inputLine:" + inputLineNum + ",outputLine:" + outputLineNum + ",badLine:"
            + badLineNum + "}");
    System.exit(0);
}

From source file:com.ailk.oci.ocnosql.tools.load.single.SingleColumnImportTsv.java

License:Apache License

public boolean execute(Connection conn, OciTableRef table) {
    if (conn == null) {
        msg = "Connection object must not be null";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ClientRuntimeException(msg);
    }
    Configuration conf = conn.getConf();
    if (table == null) {
        msg = "table must not be null";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ClientRuntimeException(msg);
    }

    String tableName = table.getName();
    String column = table.getColumns();
    String seperator = table.getSeperator();
    String inputPath = table.getInputPath();
    String tmpOutPut = table.getImportTmpOutputPath();
    String skipBadLine = table.getSkipBadLine();
    String compressor = table.getCompressor();
    String rowkeyUnique = table.getRowKeyUnique();
    String algoColumn = table.getAlgoColumn();
    String rowkeyGenerator = table.getRowkeyGenerator();
    String rowkeyColumn = table.getRowkeyColumn();
    String callback = table.getCallback();

    if (StringUtils.isEmpty(tableName)) {
        msg = "No " + CommonConstants.TABLE_NAME
                + " specified. Please check config,then try again after refreshing cache";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ConfigException(msg);
    }
    conf.set(CommonConstants.TABLE_NAME, tableName);

    //      if(StringUtils.isEmpty(seperator)){
    //         msg = "No " + CommonConstants.SEPARATOR + " specified. Please check config,then try again after refreshing cache";
    //         retMap.put(FAILED_REASON, msg);
    //         LOG.error(msg);
    //         throw new ConfigException(msg);
    //      }
    //      conf.set(CommonConstants.SEPARATOR, seperator);

    if (StringUtils.isEmpty(seperator)) {
        conf.set(CommonConstants.SEPARATOR, CommonConstants.DEFAULT_SEPARATOR);
    }

    // Make sure columns are specified, split by ","
    String columns[] = StringUtils.splitByWholeSeparatorPreserveAllTokens(column, ",");
    if (columns == null) {
        msg = "No " + CommonConstants.COLUMNS
                + " specified. Please check config,then try again after refreshing cache";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ConfigException(msg);
    }
    conf.set(CommonConstants.COLUMNS, column);

    if (StringUtils.isEmpty(rowkeyColumn) && StringUtils.isEmpty(algoColumn)) {
        msg = "No " + CommonConstants.ROW_KEY
                + " rule specified. Please check config,then try again after refreshing cache";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ConfigException(msg);
    }
    conf.set(CommonConstants.SEPARATOR, seperator);

    //      int rowkeysFound = 0;
    //      for (String col : columns) {
    //         if (col.equals(CommonConstants.ROW_KEY))
    //            rowkeysFound++;
    //      }
    //      // check that exactly one HBASE_ROW_KEY column is present
    //      if (rowkeysFound != 1) {
    //         msg = "Must specify exactly one column as " + CommonConstants.ROW_KEY + ". Please check config,then again after refreshing cache";
    //         retMap.put(FAILED_REASON, msg);
    //         LOG.error(msg);
    //         throw new ConfigException(msg);
    //      }

    // at least one column besides HBASE_ROW_KEY is required
    if (columns.length < 2) {
        msg = "One or more columns in addition to the row key are required. Please check config,then try again after refreshing cache";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ConfigException(msg);
    }

    //":"
    String[] columnTmp = null;
    for (int i = 0; i < columns.length; i++) {
        columnTmp = columns[i].split(":");
        if (columnTmp != null && columnTmp.length == 2) {
            break;
        }
    }

    // use the family of the first "family:qualifier" column
    conf.set(CommonConstants.SINGLE_FAMILY, columnTmp[0]);

    // whether to skip bad lines
    if (!StringUtils.isEmpty(skipBadLine)) {
        conf.set(CommonConstants.SKIPBADLINE, skipBadLine);
    }
    // compressor and row-key generation settings
    conf.set(CommonConstants.COMPRESSOR, (compressor == null) ? DEFAULT_COMPRESSOR : compressor);
    conf.set(CommonConstants.ALGOCOLUMN, algoColumn);
    conf.set(CommonConstants.ROWKEY_GENERATOR, rowkeyGenerator);
    conf.set(CommonConstants.ROWKEYCOLUMN, rowkeyColumn);
    conf.set(CommonConstants.ROWKEYCALLBACK, callback);

    boolean ret = false;
    //      Counter failCounter = null;
    try {
        hbaseAdmin = new HBaseAdmin(conf);
        TableConfiguration.getInstance().writeTableConfiguration(tableName, column, seperator, conf);
        //         Job job = createSubmittableJob(conf, tableName, inputPath, tmpOutPut);
        //         // submit the job and wait for completion
        //         ret = job.waitForCompletion(true);
        //         Counters counters = job.getCounters();
        //         for (String groupName : counters.getGroupNames()) {
        //            failCounter = counters.findCounter(groupName, "NUM_FAILED_MAPS");
        //            if(failCounter != null){
        //               break;
        //            }
        //         }
        conf.set(CommonConstants.TABLE_NAME, tableName);
        String hdfs_url = conf.get(CommonConstants.HDFS_URL);
        FileSystem fs = FileSystem.get(URI.create(hdfs_url), conf);
        FileStatus[] fileStatusArr = fs.listStatus(new Path(hdfs_url + inputPath));
        if (fileStatusArr != null && fileStatusArr.length > 0) {
            if (fileStatusArr[0].isFile()) {
                ret = runJob(conf, tableName, inputPath, tmpOutPut);
            }
            int inputPathNum = 0;
            for (FileStatus everyInputPath : fileStatusArr) {
                Path inputPathStr = everyInputPath.getPath();
                String absoluteInputPathStr = inputPath + "/" + inputPathStr.getName();
                boolean retCode = runJob(conf, tableName, absoluteInputPathStr,
                        tmpOutPut + "/" + inputPathStr.getName());
                if (retCode) {
                    String base64Seperator = conf.get(CommonConstants.SEPARATOR);
                    conf.set(CommonConstants.SEPARATOR, new String(Base64.decode(base64Seperator))); // restore the original (non-Base64) separator
                    if (inputPathNum == fileStatusArr.length - 1) {
                        ret = true;
                    }
                    inputPathNum++;
                    continue;
                } else { // job failed: stop processing remaining input paths
                    ret = false;
                    inputPathNum++;
                    break;
                }
            }
        }

    } catch (Exception e) {
        msg = "job execute failed,nested exception is " + e;
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ClientRuntimeException(msg);
    }

    boolean result = true;
    if (!ret) {
        msg = "execute job failed,please check map/reduce log in jobtracker page";
        retMap.put(FAILED_REASON, msg);
        result = false;
    }
    /*
    else {
     String[] params = new String[2];
     params[0] = tmpOutPut;
     params[1] = tableName;
     int retrunCode = -1;
     try {
    //bulkload complete
    retrunCode = ToolRunner.run(new LoadIncrementalHFiles(conf),
          params);
     } catch (Exception e) {
    msg = "job execute failed,nested exception is " + e;
    retMap.put(FAILED_REASON, msg);
    LOG.error(msg);
    throw new ClientRuntimeException(msg);
     }
     if(retrunCode != 0) result = false;
    }
    */
    return result;
}

From source file:com.ailk.oci.ocnosql.tools.load.single.SingleColumnImportTsv.java

License:Apache License

/**
 * Main entry point.
 *
 * @param args  The command line parameters.
 * @throws Exception When running the job fails.
 */
public static void main(String[] args) throws Exception {
    Map<String, String> map = getProperty();
    if (map == null || map.size() == 0) {
        System.err.println("Error: read conf file " + CONF_FILE + " occur error.");
        System.exit(0);
    }
    Configuration conf = Connection.getInstance().getConf();

    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 2) {
        usage("Wrong number of arguments: " + otherArgs.length);
        System.exit(-1);
    }

    // Make sure columns are specified
    String columns = conf.get(CommonConstants.COLUMNS);
    if (columns == null) {
        usage("No columns specified. Please specify with -D" + CommonConstants.COLUMNS + "=...");
        System.exit(-1);
    }
    String seperator = conf.get(CommonConstants.SEPARATOR);
    if (seperator == null) {
        conf.set(CommonConstants.SEPARATOR, CommonConstants.DEFAULT_SEPARATOR);
        seperator = CommonConstants.DEFAULT_SEPARATOR;
    }
    // Make sure one or more columns are specified
    if (columns.split(",").length < 2) {
        usage("One or more columns in addition to the row key are required");
        System.exit(-1);
    }
    // make sure tableName and columns are upper-case for use by Phoenix
    columns = columns.toUpperCase();
    String tableName = otherArgs[0].toUpperCase();
    String inputPath = otherArgs[1];

    hbaseAdmin = new HBaseAdmin(conf);
    String tmpOutputPath = conf.get(CommonConstants.IMPORT_TMP_OUTPUT);
    conf.set(CommonConstants.TABLE_NAME, tableName);
    conf.set(CommonConstants.COLUMNS, columns);
    String hdfs_url = conf.get(CommonConstants.HDFS_URL);
    FileSystem fs = FileSystem.get(URI.create(hdfs_url), conf);
    FileStatus[] fileStatusArr = fs.listStatus(new Path(hdfs_url + inputPath));
    if (fileStatusArr != null && fileStatusArr.length > 0) {
        TableConfiguration.getInstance().writeTableConfiguration(tableName, columns, seperator, conf);
        if (fileStatusArr[0].isFile()) { // the input path is a single file: run one job
            boolean result = runJob(conf, tableName, inputPath, tmpOutputPath);
            if (result) {
                System.exit(0);
            }
            System.exit(-1);
        }
        for (FileStatus everyInputPath : fileStatusArr) { // the input path is a directory: one job per entry
            Path inputPathStr = everyInputPath.getPath();
            String absoluteInputPathStr = inputPath + "/" + inputPathStr.getName();
            FileStatus[] subFileStatusArr = fs.listStatus(new Path(hdfs_url + absoluteInputPathStr));
            if (subFileStatusArr == null || subFileStatusArr.length == 0) // skip empty directories: no job to run
                continue;
            boolean ret = runJob(conf, tableName, absoluteInputPathStr,
                    tmpOutputPath + "/" + inputPathStr.getName());
            if (ret) {
                String base64Seperator = conf.get(CommonConstants.SEPARATOR);
                conf.set(CommonConstants.SEPARATOR, new String(Base64.decode(base64Seperator))); // restore the original (non-Base64) separator
                continue;
            } else // job failed: abort
                System.exit(-1);

        }
    }
    System.exit(0);
}