Example usage for org.apache.hadoop.conf Configuration get

List of usage examples for org.apache.hadoop.conf Configuration get

Introduction

On this page you can find example usage for org.apache.hadoop.conf Configuration get.

Prototype

public String get(String name) 

Document

Get the value of the name property, or null if no such property exists.
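
A minimal, self-contained sketch of the lookup behavior (the property names below are illustrative, not taken from the examples that follow):

import org.apache.hadoop.conf.Configuration;

public class ConfigurationGetExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("example.greeting", "hello");

        String greeting = conf.get("example.greeting"); // "hello"
        String missing = conf.get("example.missing"); // null: no such property

        // The two-argument overload returns a default instead of null;
        // several examples below use it for keys such as CommonConstants.INPUTFORMAT.
        String fmt = conf.get("example.missing", "fallback"); // "fallback"

        System.out.println(greeting + " / " + missing + " / " + fmt);
    }
}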

Usage

From source file:com.ailk.oci.ocnosql.tools.load.mutiple.MutipleColumnImportTsv.java

License:Apache License

public boolean execute(Connection conn, OciTableRef table) {
    if (conn == null) {
        msg = "Connection object must not be null";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ClientRuntimeException(msg);
    }
    Configuration conf = conn.getConf();
    if (table == null) {
        msg = "table must not be null";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ClientRuntimeException(msg);
    }

    String tableName = table.getName();
    String column = table.getColumns();
    String seperator = table.getSeperator();
    String inputPath = table.getInputPath();
    String tmpOutPut = table.getImportTmpOutputPath();
    String skipBadLine = table.getSkipBadLine();
    String compressor = table.getCompressor();
    String rowkeyUnique = table.getRowKeyUnique();
    String algoColumn = table.getAlgoColumn();
    String rowkeyGenerator = table.getRowkeyGenerator();
    String rowkeyColumn = table.getRowkeyColumn();
    String callback = table.getCallback();

    if (StringUtils.isEmpty(tableName)) {
        msg = "No " + CommonConstants.TABLE_NAME
                + " specified. Please check config,then try again after refreshing cache";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ConfigException(msg);
    }
    conf.set(CommonConstants.TABLE_NAME, tableName);

    if (StringUtils.isEmpty(seperator)) {
        msg = "No " + CommonConstants.SEPARATOR
                + " specified. Please check config,then try again after refreshing cache";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ConfigException(msg);
    }
    conf.set(CommonConstants.SEPARATOR, seperator);

    // Make sure columns are specified
    String columns[] = StringUtils.splitByWholeSeparatorPreserveAllTokens(column, ",");
    if (columns == null) {
        msg = "No " + CommonConstants.COLUMNS
                + " specified. Please check config,then try again after refreshing cache";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ConfigException(msg);
    }
    conf.set(CommonConstants.COLUMNS, column);

    //      int rowkeysFound = 0;
    //      for (String col : columns) {
    //         if (col.equals(CommonConstants.ROW_KEY))
    //            rowkeysFound++;
    //      }
    //      if (rowkeysFound != 1) {
    //         msg = "Must specify exactly one column as " + CommonConstants.ROW_KEY + ". Please check config,then again after refreshing cache";
    //         retMap.put(FAILED_REASON, msg);
    //         LOG.error(msg);
    //         throw new ConfigException(msg);
    //      }

    if (columns.length < 2) {
        msg = "One or more columns in addition to the row key are required. Please check config,then try again after refreshing cache";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ConfigException(msg);
    }

    String[] columnTmp = null;
    for (int i = 0; i < columns.length; i++) {
        columnTmp = columns[i].split(":");
        if (columnTmp != null && columnTmp.length == 2) {
            break;
        }
    }
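    // assumes at least one column is declared as family:qualifier; its family becomes the single column family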
    conf.set(CommonConstants.SINGLE_FAMILY, columnTmp[0]);
    if (!StringUtils.isEmpty(skipBadLine)) {
        conf.set(CommonConstants.SKIPBADLINE, skipBadLine);
    }
    // use the default compressor if none was specified
    conf.set(CommonConstants.COMPRESSOR, (compressor == null) ? DEFAULT_COMPRESSOR : compressor);
    conf.set(CommonConstants.ALGOCOLUMN, algoColumn);
    conf.set(CommonConstants.ROWKEY_GENERATOR, rowkeyGenerator);
    conf.set(CommonConstants.ROWKEYCOLUMN, rowkeyColumn);
    conf.set(CommonConstants.ROWKEYCALLBACK, callback);

    boolean ret = false;
    Counter failCounter = null;
    try {
        hbaseAdmin = new HBaseAdmin(conf);
        TableConfiguration.getInstance().writeTableConfiguration(tableName, column, seperator, conf);
        conf.set(CommonConstants.TABLE_NAME, tableName);
        String hdfs_url = conf.get(CommonConstants.HDFS_URL);
        FileSystem fs = FileSystem.get(URI.create(hdfs_url), conf);
        FileStatus[] fileStatusArr = fs.listStatus(new Path(hdfs_url + inputPath));
        if (fileStatusArr != null && fileStatusArr.length > 0) {
            if (fileStatusArr[0].isFile()) {
                ret = (Boolean) runJob(conf, tableName, inputPath, tmpOutPut)[0];
            }
            int inputPathNum = 0;
            for (FileStatus everyInputPath : fileStatusArr) {
                Path inputPathStr = everyInputPath.getPath();
                String absoluteInputPathStr = inputPath + "/" + inputPathStr.getName();
                boolean retCode = (Boolean) runJob(conf, tableName, absoluteInputPathStr,
                        tmpOutPut + "/" + inputPathStr.getName())[0];
                if (retCode) {
                    String base64Seperator = conf.get(CommonConstants.SEPARATOR);
                    conf.set(CommonConstants.SEPARATOR, new String(Base64.decode(base64Seperator))); // restore the original separator (runJob re-encodes it as Base64)
                    if (inputPathNum == fileStatusArr.length - 1) {
                        ret = true;
                    }
                    inputPathNum++;
                    continue;
                } else {
                    ret = false;
                    inputPathNum++;
                    break;
                }
            }
        }
    } catch (Exception e) {
        msg = "job execute failed,nested exception is " + e;
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ClientRuntimeException(msg);
    }

    if (!ret) {
        msg = "execute job failed,please check map/reduce log in jobtracker page";
        retMap.put(FAILED_REASON, msg);
        return false;
    }
    return true;
}

From source file:com.ailk.oci.ocnosql.tools.load.mutiple.MutipleColumnImportTsv.java

License:Apache License

/**
 * Main entry point.
 *
 * @param args  The command line parameters.
 * @throws Exception When running the job fails.
 */
public static void main(String[] args) throws Exception {
    long inputLineNum = 0L;
    long badLineNum = 0L;
    long outputLineNum = 0L;
    Configuration conf = HBaseConfiguration.create();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 2) {
        usage("Wrong number of arguments: " + otherArgs.length);
        System.exit(-1);
    }
    // Make sure columns are specified
    String columns = conf.get(CommonConstants.COLUMNS);
    if (columns == null) {
        usage("No columns specified. Please specify with -D" + CommonConstants.COLUMNS + "=...");
        System.exit(-1);
    }
    String seperator = conf.get(CommonConstants.SEPARATOR);
    if (StringUtils.isEmpty(seperator)) {
        conf.set(CommonConstants.SEPARATOR, CommonConstants.DEFAULT_SEPARATOR);
        seperator = CommonConstants.DEFAULT_SEPARATOR;
    }
    // Make sure one or more columns are specified
    if (columns.split(",").length < 2) {
        usage("One or more columns in addition to the row key are required");
        System.exit(-1);
    }
    // Make sure tableName and columns are upper-case for use by Phoenix.
    columns = columns.toUpperCase();
    String notNeedLoadColumnsStr = conf.get(CommonConstants.NOTNEEDLOADCOLUMNS);
    String notNeedLoadColumns = null;
    if (!StringUtils.isEmpty(notNeedLoadColumnsStr)) {
        notNeedLoadColumns = notNeedLoadColumnsStr.toUpperCase();
        conf.set(CommonConstants.NOTNEEDLOADCOLUMNS, notNeedLoadColumns);
    }

    String writeTableConfigColumns = getWriteConfigColumn(columns, notNeedLoadColumns);
    hbaseAdmin = new HBaseAdmin(conf);
    String tableName = otherArgs[0].toUpperCase();
    String inputPath = otherArgs[1];
    String tmpOutputPath = conf.get(CommonConstants.IMPORT_TMP_OUTPUT);
    conf.set(CommonConstants.TABLE_NAME, tableName);
    conf.set(CommonConstants.COLUMNS, columns);
    String pathStr = conf.get(CommonConstants.HDFS_URL) + inputPath;
    FileSystem fs = FileSystem.get(URI.create(conf.get(CommonConstants.HDFS_URL)), conf);
    FileStatus[] fileStatusArr = fs.listStatus(new Path(pathStr));
    if (fileStatusArr != null && fileStatusArr.length > 0) {
        TableConfiguration.getInstance().writeTableConfiguration(tableName, writeTableConfigColumns, seperator,
                conf);
        if (fileStatusArr[0].isFile()) {
            Object[] resObjs = runJob(conf, tableName, inputPath, tmpOutputPath);
            inputLineNum = (Long) resObjs[1];
            outputLineNum = (Long) resObjs[2];
            badLineNum = (Long) resObjs[3];
            LOG.info("Bulkload Result={inputLine:" + inputLineNum + ",outputLine:" + outputLineNum + ",badLine:"
                    + badLineNum + "}");
            boolean result = (Boolean) resObjs[0];
            if (result) {
                System.exit(0);
            }
            System.exit(-1);
        }
        for (FileStatus everyInputPath : fileStatusArr) {
            Path inputPathStr = everyInputPath.getPath();
            String absoluteInputPathStr = inputPath + "/" + inputPathStr.getName();
            FileStatus[] subFileStatusArr = fs
                    .listStatus(new Path(conf.get(CommonConstants.HDFS_URL) + absoluteInputPathStr));
            if (subFileStatusArr == null || subFileStatusArr.length == 0) // nothing to import; skip this sub-path
                continue;
            Object[] resObjs = runJob(conf, tableName, absoluteInputPathStr,
                    tmpOutputPath + "/" + inputPathStr.getName());
            boolean ret = (Boolean) resObjs[0];
            if (ret) {
                inputLineNum += (Long) resObjs[1];
                outputLineNum += (Long) resObjs[2];
                badLineNum += (Long) resObjs[3];
                String seperatorStr = conf.get(CommonConstants.SEPARATOR);
                conf.set(CommonConstants.SEPARATOR, new String(Base64.decode(seperatorStr))); // restore the original separator (runJob re-encodes it as Base64)
                continue;
            } else { // a sub-job failed; report the counts and abort
                LOG.error("Bulkload Result={inputLine:" + inputLineNum + ",outputLine:" + outputLineNum
                        + ",badLine:" + badLineNum + "}");
                System.exit(-1);
            }
        }
        LOG.info("Bulkload Result={inputLine:" + inputLineNum + ",outputLine:" + outputLineNum + ",badLine:"
                + badLineNum + "}");
    }
    LOG.info("Bulkload Result={inputLine:" + inputLineNum + ",outputLine:" + outputLineNum + ",badLine:"
            + badLineNum + "}");
    System.exit(0);
}

From source file:com.ailk.oci.ocnosql.tools.load.single.SingleColumnImporterMapper.java

License:Apache License

/**
 * Handles initializing this class with objects specific to it (i.e., the parser).
 * Common initialization that might be leveraged by a subclass is done in
 * <code>doSetup</code>. Hence a subclass may choose to override this method
 * and call <code>doSetup</code> as well before handling its own custom params.
 *
 * @param context
 */
@Override
protected void setup(Context context) {
    LOG.info("single set up");
    doSetup(context);

    Configuration conf = context.getConfiguration();

    parser = new SingleColumnImportTsv.TsvParser(conf.get(CommonConstants.COLUMNS), separator);
    //    if (parser.getRowKeyColumnIndex() == -1) {
    //      throw new RuntimeException("No row key column specified");
    //    }
}

From source file:com.ailk.oci.ocnosql.tools.load.single.SingleColumnImporterMapper.java

License:Apache License

/**
 * Handles common parameter initialization that a subclass might want to leverage.
 * @param context//  ww w.  j  a  v a 2s  .c o  m
 */
protected void doSetup(Context context) {
    Configuration conf = context.getConfiguration();

    // If a custom separator has been used,
    // decode it back from Base64 encoding.
    separator = conf.get(CommonConstants.SEPARATOR);
    if (separator == null) {
        separator = CommonConstants.DEFAULT_SEPARATOR;
    } else {
        separator = new String(Base64.decode(separator));
    }

    skipBadLines = context.getConfiguration().getBoolean(CommonConstants.SKIPBADLINE, true);

    // counter for bad (skipped) lines
    badLineCount = context.getCounter("ImportTsv", "Bad Lines");

    //    String rowkeyGennerator = context.getConfiguration().get(CommonConstants.ROWKEY_GENERATOR);
    //    // if the configured generator type is md5, use MD5RowKeyGenerator
    //    if(RowKeyGeneratorHolder.TYPE.md5.name().equalsIgnoreCase(rowkeyGennerator)){
    //       rowkeyGenerator = new MD5RowKeyGenerator();
    //    }
    tableName = conf.get(CommonConstants.TABLE_NAME);
    List<GenRKStep> genRKStepList = TableConfiguration.getInstance().getTableGenRKSteps(tableName, conf);
    rowkeyGenerator = new TableRowKeyGenerator(conf, genRKStepList);
    writer = new TextArrayWritable();
}

From source file:com.ailk.oci.ocnosql.tools.load.single.SingleColumnImportTsv.java

License:Apache License

/**
 * Sets up the actual ImportTsv MapReduce job.
 *
 * @param conf  The current configuration.
 * @return The newly created job.
 * @throws IOException When setting up the job fails.
 */
public static Job createSubmittableJob(Configuration conf, String tableName, String inputPath,
        String tmpOutputPath) throws IOException, ClassNotFoundException {

    // Support non-XML supported characters
    // by re-encoding the passed separator as a Base64 string.
    String actualSeparator = conf.get(CommonConstants.SEPARATOR);
    if (actualSeparator != null) {
        conf.set(CommonConstants.SEPARATOR, Base64.encodeBytes(actualSeparator.getBytes()));
    }

    // See if a non-default Mapper was set; the default is SingleColumnImporterMapper
    String mapperClassName = conf.get(MAPPER_CONF_KEY);
    Class mapperClass = mapperClassName != null ? Class.forName(mapperClassName) : DEFAULT_MAPPER;

    Path inputDir = new Path(inputPath);
    // create the job
    Job job = new Job(conf, NAME + "_" + tableName);
    //Set the Jar by finding where a given class came from.
    job.setJarByClass(SingleColumnImportTsv.class);
    // set the input paths for the job
    FileInputFormat.setInputPaths(job, inputDir);

    // The InputFormat can be overridden via -Dimporttsv.inputFormat;
    // it defaults to TextInputFormat.
    String inputFmtName = conf.get(CommonConstants.INPUTFORMAT,
            "org.apache.hadoop.mapreduce.lib.input.TextInputFormat");
    LOG.info(CommonConstants.INPUTFORMAT + " is " + inputFmtName);
    Class<? extends InputFormat> inputFmtClass = Class.forName(inputFmtName).asSubclass(InputFormat.class);
    job.setInputFormatClass(inputFmtClass);
    // set the mapper class
    job.setMapperClass(mapperClass);

    String hfileOutPath = tmpOutputPath;
    if (hfileOutPath != null) {
        // create the target table if it does not exist yet
        if (!doesTableExist(tableName)) {
            createTable(conf, tableName);
        }
        HTable table = new HTable(conf, tableName);
        // set the reducer that writes out the HFiles
        job.setReducerClass(SingleColumnReducer.class);

        Path outputDir = new Path(hfileOutPath);
        // set the output directory for the generated HFiles
        FileOutputFormat.setOutputPath(job, outputDir);
        job.setMapOutputKeyClass(ImmutableBytesWritable.class);
        job.setMapOutputValueClass(TextArrayWritable.class);
        // configure the job's partitioner, output format and reducers for incremental (bulk) load
        configureIncrementalLoad(job, table);

    } else { // no HFile output path: write directly to the table with Puts
        // No reducers.  Just write straight to table.  Call initTableReducerJob
        // to set up the TableOutputFormat.
        TableMapReduceUtil.initTableReducerJob(tableName, null, job);
        job.setNumReduceTasks(0);
    }

    TableMapReduceUtil.addDependencyJars(job);
    TableMapReduceUtil.addDependencyJars(job.getConfiguration(),
            com.google.common.base.Function.class /* Guava used by TsvParser */);
    return job;
}

From source file:com.ailk.oci.ocnosql.tools.load.single.SingleColumnImportTsv.java

License:Apache License

public boolean execute(Connection conn, OciTableRef table) {
    if (conn == null) {
        msg = "Connection object must not be null";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ClientRuntimeException(msg);
    }
    Configuration conf = conn.getConf();
    if (table == null) {
        msg = "table must not be null";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ClientRuntimeException(msg);
    }

    String tableName = table.getName();
    String column = table.getColumns();
    String seperator = table.getSeperator();
    String inputPath = table.getInputPath();
    String tmpOutPut = table.getImportTmpOutputPath();
    String skipBadLine = table.getSkipBadLine();
    String compressor = table.getCompressor();
    String rowkeyUnique = table.getRowKeyUnique();
    String algoColumn = table.getAlgoColumn();
    String rowkeyGenerator = table.getRowkeyGenerator();
    String rowkeyColumn = table.getRowkeyColumn();
    String callback = table.getCallback();

    if (StringUtils.isEmpty(tableName)) {
        msg = "No " + CommonConstants.TABLE_NAME
                + " specified. Please check config,then try again after refreshing cache";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ConfigException(msg);
    }
    conf.set(CommonConstants.TABLE_NAME, tableName);

    //      if(StringUtils.isEmpty(seperator)){
    //         msg = "No " + CommonConstants.SEPARATOR + " specified. Please check config,then try again after refreshing cache";
    //         retMap.put(FAILED_REASON, msg);
    //         LOG.error(msg);
    //         throw new ConfigException(msg);
    //      }
    //      conf.set(CommonConstants.SEPARATOR, seperator);

    if (StringUtils.isEmpty(seperator)) {
        conf.set(CommonConstants.SEPARATOR, CommonConstants.DEFAULT_SEPARATOR);
    }

    // Make sure columns are specified, split by ","
    String columns[] = StringUtils.splitByWholeSeparatorPreserveAllTokens(column, ",");
    if (columns == null) {
        msg = "No " + CommonConstants.COLUMNS
                + " specified. Please check config,then try again after refreshing cache";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ConfigException(msg);
    }
    conf.set(CommonConstants.COLUMNS, column);

    if (StringUtils.isEmpty(rowkeyColumn) && StringUtils.isEmpty(algoColumn)) {
        msg = "No " + CommonConstants.ROW_KEY
                + " rule specified. Please check config,then try again after refreshing cache";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ConfigException(msg);
    }
    conf.set(CommonConstants.SEPARATOR, seperator);

    //      int rowkeysFound = 0;
    //      for (String col : columns) {
    //         if (col.equals(CommonConstants.ROW_KEY))
    //            rowkeysFound++;
    //      }
    //      // exactly one HBASE_ROW_KEY column must be specified
    //      if (rowkeysFound != 1) {
    //         msg = "Must specify exactly one column as " + CommonConstants.ROW_KEY + ". Please check config,then again after refreshing cache";
    //         retMap.put(FAILED_REASON, msg);
    //         LOG.error(msg);
    //         throw new ConfigException(msg);
    //      }

    // one or more columns in addition to HBASE_ROW_KEY are required
    if (columns.length < 2) {
        msg = "One or more columns in addition to the row key are required. Please check config,then try again after refreshing cache";
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ConfigException(msg);
    }

    //":"
    String[] columnTmp = null;
    for (int i = 0; i < columns.length; i++) {
        columnTmp = columns[i].split(":");
        if (columnTmp != null && columnTmp.length == 2) {
            break;
        }
    }

    // record the matched column's family as the single column family
    conf.set(CommonConstants.SINGLE_FAMILY, columnTmp[0]);

    // whether to skip bad lines
    if (!StringUtils.isEmpty(skipBadLine)) {
        conf.set(CommonConstants.SKIPBADLINE, skipBadLine);
    }
    // use the default compressor if none was specified
    conf.set(CommonConstants.COMPRESSOR, (compressor == null) ? DEFAULT_COMPRESSOR : compressor);
    conf.set(CommonConstants.ALGOCOLUMN, algoColumn);
    conf.set(CommonConstants.ROWKEY_GENERATOR, rowkeyGenerator);
    conf.set(CommonConstants.ROWKEYCOLUMN, rowkeyColumn);
    conf.set(CommonConstants.ROWKEYCALLBACK, callback);

    boolean ret = false;
    //      Counter failCounter = null;
    try {
        hbaseAdmin = new HBaseAdmin(conf);
        TableConfiguration.getInstance().writeTableConfiguration(tableName, column, seperator, conf);
        //         Job job = createSubmittableJob(conf, tableName, inputPath, tmpOutPut);
        //         //job
        //         ret = job.waitForCompletion(true);
        //         Counters counters = job.getCounters();
        //         for (String groupName : counters.getGroupNames()) {
        //            failCounter = counters.findCounter(groupName, "NUM_FAILED_MAPS");
        //            if(failCounter != null){
        //               break;
        //            }
        //         }
        conf.set(CommonConstants.TABLE_NAME, tableName);
        String hdfs_url = conf.get(CommonConstants.HDFS_URL);
        FileSystem fs = FileSystem.get(URI.create(hdfs_url), conf);
        FileStatus[] fileStatusArr = fs.listStatus(new Path(hdfs_url + inputPath));
        if (fileStatusArr != null && fileStatusArr.length > 0) {
            if (fileStatusArr[0].isFile()) {
                ret = runJob(conf, tableName, inputPath, tmpOutPut);
            }
            int inputPathNum = 0;
            for (FileStatus everyInputPath : fileStatusArr) {
                Path inputPathStr = everyInputPath.getPath();
                String absoluteInputPathStr = inputPath + "/" + inputPathStr.getName();
                boolean retCode = runJob(conf, tableName, absoluteInputPathStr,
                        tmpOutPut + "/" + inputPathStr.getName());
                if (retCode) {
                    String base64Seperator = conf.get(CommonConstants.SEPARATOR);
                    conf.set(CommonConstants.SEPARATOR, new String(Base64.decode(base64Seperator))); // restore the original separator (runJob re-encodes it as Base64)
                    if (inputPathNum == fileStatusArr.length - 1) {
                        ret = true;
                    }
                    inputPathNum++;
                    continue;
                } else {
                    ret = false;
                    inputPathNum++;
                    break;
                }
            }
        }

    } catch (Exception e) {
        msg = "job execute failed,nested exception is " + e;
        retMap.put(FAILED_REASON, msg);
        LOG.error(msg);
        throw new ClientRuntimeException(msg);
    }

    boolean result = true;
    if (!ret) {
        msg = "execute job failed,please check map/reduce log in jobtracker page";
        retMap.put(FAILED_REASON, msg);
        result = false;
    }
    /*
    else {
     String[] params = new String[2];
     params[0] = tmpOutPut;
     params[1] = tableName;
     int retrunCode = -1;
     try {
    //bulkload complete
    retrunCode = ToolRunner.run(new LoadIncrementalHFiles(conf),
          params);
     } catch (Exception e) {
    msg = "job execute failed,nested exception is " + e;
    retMap.put(FAILED_REASON, msg);
    LOG.error(msg);
    throw new ClientRuntimeException(msg);
     }
     if(retrunCode != 0) result = false;
    }
    */
    return result;
}

From source file:com.ailk.oci.ocnosql.tools.load.single.SingleColumnImportTsv.java

License:Apache License

/**
 * Main entry point.
 *
 * @param args  The command line parameters.
 * @throws Exception When running the job fails.
 */
public static void main(String[] args) throws Exception {
    Map<String, String> map = getProperty();
    if (map == null || map.size() == 0) {
        System.err.println("Error: read conf file " + CONF_FILE + " occur error.");
        System.exit(0);
    }
    Configuration conf = Connection.getInstance().getConf();

    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 2) {
        usage("Wrong number of arguments: " + otherArgs.length);
        System.exit(-1);
    }

    // Make sure columns are specified
    String columns = conf.get(CommonConstants.COLUMNS);
    if (columns == null) {
        usage("No columns specified. Please specify with -D" + CommonConstants.COLUMNS + "=...");
        System.exit(-1);
    }
    String seperator = conf.get(CommonConstants.SEPARATOR);
    if (seperator == null) {
        conf.set(CommonConstants.SEPARATOR, CommonConstants.DEFAULT_SEPARATOR);
        seperator = CommonConstants.DEFAULT_SEPARATOR;
    }
    // Make sure one or more columns are specified
    if (columns.split(",").length < 2) {
        usage("One or more columns in addition to the row key are required");
        System.exit(-1);
    }
    // Make sure tableName and columns are upper-case for use by Phoenix.
    columns = columns.toUpperCase();
    String tableName = otherArgs[0].toUpperCase();
    String inputPath = otherArgs[1];

    hbaseAdmin = new HBaseAdmin(conf);
    String tmpOutputPath = conf.get(CommonConstants.IMPORT_TMP_OUTPUT);
    conf.set(CommonConstants.TABLE_NAME, tableName);
    conf.set(CommonConstants.COLUMNS, columns);
    String hdfs_url = conf.get(CommonConstants.HDFS_URL);
    FileSystem fs = FileSystem.get(URI.create(hdfs_url), conf);
    FileStatus[] fileStatusArr = fs.listStatus(new Path(hdfs_url + inputPath));
    if (fileStatusArr != null && fileStatusArr.length > 0) {
        TableConfiguration.getInstance().writeTableConfiguration(tableName, columns, seperator, conf);
        if (fileStatusArr[0].isFile()) { // the input path is a single file
            boolean result = runJob(conf, tableName, inputPath, tmpOutputPath);
            if (result) {
                System.exit(0);
            }
            System.exit(-1);
        }
        for (FileStatus everyInputPath : fileStatusArr) { // the input path is a directory; walk its sub-paths
            Path inputPathStr = everyInputPath.getPath();
            String absoluteInputPathStr = inputPath + "/" + inputPathStr.getName();
            FileStatus[] subFileStatusArr = fs.listStatus(new Path(hdfs_url + absoluteInputPathStr));
            if (subFileStatusArr == null || subFileStatusArr.length == 0) // nothing to import; skip this sub-path
                continue;
            boolean ret = runJob(conf, tableName, absoluteInputPathStr,
                    tmpOutputPath + "/" + inputPathStr.getName());
            if (ret) {
                String base64Seperator = conf.get(CommonConstants.SEPARATOR);
                conf.set(CommonConstants.SEPARATOR, new String(Base64.decode(base64Seperator))); // restore the original separator (runJob re-encodes it as Base64)
                continue;
            } else // a sub-job failed; abort
                System.exit(-1);

        }
    }
    System.exit(0);
}

From source file:com.alibaba.wasp.conf.WaspConfiguration.java

License:Apache License

private static void checkDefaultsVersion(Configuration conf) {
    if (conf.getBoolean("wasp.defaults.for.version.skip", Boolean.TRUE))
        return;
    String defaultsVersion = conf.get("wasp.defaults.for.version");
    String thisVersion = VersionInfo.getVersion();
    if (!thisVersion.equals(defaultsVersion)) {
        throw new RuntimeException("wasp-default.xml file seems to be for and old version of Wasp ("
                + defaultsVersion + "), this version is " + thisVersion);
    }
}
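
Note that because the skip flag defaults to Boolean.TRUE here, the defaults-version check is effectively opt-in. A hedged sketch of the keys involved (checkDefaultsVersion itself is private, so this only illustrates the flag semantics; the version string is hypothetical):

import org.apache.hadoop.conf.Configuration;

public class DefaultsVersionFlagExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Opt in to the check; with the default of true it is skipped entirely.
        conf.setBoolean("wasp.defaults.for.version.skip", false);

        // Normally supplied by wasp-default.xml; a value that differs from
        // VersionInfo.getVersion() makes checkDefaultsVersion throw RuntimeException.
        conf.set("wasp.defaults.for.version", "0.10"); // hypothetical version string

        System.out.println(conf.get("wasp.defaults.for.version")); // "0.10"
    }
}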

From source file:com.alibaba.wasp.jdbc.ConnectionInfo.java

License:Apache License

private void readProperties(Properties info) {
    Object[] list = new Object[info.size()];
    info.keySet().toArray(list);
    Configuration conf = null;
    for (Object k : list) {
        String key = StringUtils.toUpperEnglish(k.toString());
        if (properties.containsKey(key)) {
            throw JdbcException.get(SQLErrorCode.DUPLICATE_PROPERTY_1, key);
        }
        Object value = info.get(k);
        if (isKnownSetting(key)) {
            properties.put(key, value);
        } else {
            if (conf == null) {
                conf = getConf();
            }
            if (conf.get(key) != null) {
                properties.put(key, value);
            }
        }
    }
}

From source file:com.alibaba.wasp.jdbc.ConnectionInfo.java

License:Apache License

private void readSettingsFromURL() {
    Configuration conf = getConf();
    int idx = url.indexOf(';');
    if (idx >= 0) {
        String settings = url.substring(idx + 1);
        url = url.substring(0, idx);
        String[] list = StringUtils.arraySplit(settings, ';', false);
        for (String setting : list) {
            if (setting.length() == 0) {
                continue;
            }
            int equal = setting.indexOf('=');
            if (equal < 0) {
                throw getFormatException();
            }
            String value = setting.substring(equal + 1);
            String key = setting.substring(0, equal);
            key = StringUtils.toUpperEnglish(key);
            if (!isKnownSetting(key) && conf.get(key) == null) {
                throw JdbcException.get(SQLErrorCode.UNSUPPORTED_SETTING_1, key);
            }
            String old = properties.getProperty(key);
            if (old != null && !old.equals(value)) {
                throw JdbcException.get(SQLErrorCode.DUPLICATE_PROPERTY_1, key);
            }
            properties.setProperty(key, value);
        }
    }
}