Example usage for org.apache.hadoop.conf Configuration get

List of usage examples for org.apache.hadoop.conf Configuration get

Introduction

On this page you can find example usage of org.apache.hadoop.conf Configuration get.

Prototype

public String get(String name) 

Document

Get the value of the name property, or null if no such property exists.
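
Before the usage listing, a minimal sketch of the call pattern, assuming a placeholder property name (my.app.input.path) that does not appear in any of the examples below:

import org.apache.hadoop.conf.Configuration;

public class ConfigurationGetSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Placeholder property name, used only for illustration.
        String inputPath = conf.get("my.app.input.path");

        // get(String) returns null when the property is absent,
        // so callers typically guard against that case.
        if (inputPath == null) {
            System.err.println("my.app.input.path is not set");
        } else {
            System.out.println("Input path: " + inputPath);
        }
    }
}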

Usage

From source file:com.asp.tranlog.TsvImporterMapper.java

License:Apache License

/**
 * Handles initializing this class with objects specific to it (i.e., the
 * parser). Common initialization that might be leveraged by a subclass is
 * done in <code>doSetup</code>. Hence a subclass may choose to override
 * this method and call <code>doSetup</code> as well before handling its
 * own custom params.
 * 
 * @param context
 */
@Override
protected void setup(Context context) {
    doSetup(context);

    Configuration conf = context.getConfiguration();

    charset = conf.get(ImportTsv.CHARSET_CONF_KEY);

    parser = new ImportTsv.TsvParser(conf.get(ImportTsv.COLUMNS_CONF_KEY),
            conf.getStrings(ImportTsv.KEYCOLUMNS_CONF_KEY), separator);
    keyColIndex = parser.getRowKeyColumnIndex();
    keyColLen = parser.getRowKeyColumnLen();
    if (keyColIndex == null) {
        throw new RuntimeException("No row key column specified");
    }
    columnTypes = parser.getColType();
    if (columnTypes != null) {
        colDatetimeFormater = new char[columnTypes.length];
        for (int i = 0; i < columnTypes.length; i++)
            colDatetimeFormater[i] = 0;
    }
}

From source file:com.asp.tranlog.TsvImporterMapper.java

License:Apache License

/**
 * Handles common parameter initialization that a subclass might want to
 * leverage.
 * 
 * @param context
 */
protected void doSetup(Context context) {
    Configuration conf = context.getConfiguration();

    // If a custom separator has been used,
    // decode it back from Base64 encoding.
    separator = conf.get(ImportTsv.SEPARATOR_CONF_KEY);
    if (separator == null) {
        separator = ImportTsv.DEFAULT_SEPARATOR;
    } else {
        separator = new String(Base64.decode(separator));
    }

    hbase_rowkey_separator = conf.get(ImportTsv.SEPARATOR_CONF_ROWKEY);
    if (hbase_rowkey_separator == null || hbase_rowkey_separator.trim().length() == 0) {
        hbase_rowkey_separator = "";
    } else {
        hbase_rowkey_separator = new String(Base64.decode(hbase_rowkey_separator));
    }

    ts = conf.getLong(ImportTsv.TIMESTAMP_CONF_KEY, System.currentTimeMillis());

    skipBadLines = context.getConfiguration().getBoolean(ImportTsv.SKIP_LINES_CONF_KEY, true);
    badLineCount = context.getCounter("ImportTsv", "Bad Lines");
}
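
doSetup() above reads the separator with get(String) and then Base64-decodes it. As a hedged sketch, a driver could produce such a value as shown below; it uses java.util.Base64 rather than the Base64 helper the mapper imports, and the key name importtsv.separator stands in for ImportTsv.SEPARATOR_CONF_KEY.

import java.nio.charset.StandardCharsets;
import java.util.Base64;

import org.apache.hadoop.conf.Configuration;

public class SeparatorConfigSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Assumed key name; the mapper actually reads ImportTsv.SEPARATOR_CONF_KEY.
        String separatorKey = "importtsv.separator";

        // Encode the raw separator so characters such as '\t' or '|'
        // survive being passed through the configuration as plain text.
        String encoded = Base64.getEncoder()
                .encodeToString("|".getBytes(StandardCharsets.UTF_8));
        conf.set(separatorKey, encoded);

        // The mapper side then reverses the encoding, as doSetup() does above.
        String decoded = new String(Base64.getDecoder().decode(conf.get(separatorKey)),
                StandardCharsets.UTF_8);
        System.out.println("separator = " + decoded);
    }
}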

From source file:com.atlantbh.nutch.filter.index.omit.config.OmitIndexingFilterConfiguration.java

License:Apache License

public static OmitIndexingFilterConfiguration getInstance(Configuration configuration) {
    try {

        // Get configuration from Nutch /conf folder
        Reader configReader = configuration
                .getConfResourceAsReader(configuration.get(CONFIG_FILE_PATH_PROPERTY));

        // Initialize JAXB
        JAXBContext context = JAXBContext.newInstance(new Class[] { OmitIndexingFilterConfiguration.class,
                OmitIndexingFilterConfigurationEntry.class, FilteringType.class, Target.class });
        Unmarshaller unmarshaller = context.createUnmarshaller();

        // Initialize configuration
        OmitIndexingFilterConfiguration xPathFilterConfiguration = (OmitIndexingFilterConfiguration) unmarshaller
                .unmarshal(configReader);
        return xPathFilterConfiguration;

    } catch (JAXBException e) {
        log.error("Configuration initialization error!");
    }

    return null;
}

From source file:com.atlantbh.nutch.filter.xpath.DOMContentUtils.java

License:Apache License

public void setConf(Configuration conf) {
    // forceTags is used to override configurable tag ignoring, later on
    Collection<String> forceTags = new ArrayList<String>(1);

    this.conf = conf;
    linkParams.clear();
    linkParams.put("a", new LinkParams("a", "href", 1));
    linkParams.put("area", new LinkParams("area", "href", 0));
    if (conf.getBoolean("parser.html.form.use_action", true)) {
        linkParams.put("form", new LinkParams("form", "action", 1));
        if (conf.get("parser.html.form.use_action") != null)
            forceTags.add("form");
    }
    linkParams.put("frame", new LinkParams("frame", "src", 0));
    linkParams.put("iframe", new LinkParams("iframe", "src", 0));
    linkParams.put("script", new LinkParams("script", "src", 0));
    linkParams.put("link", new LinkParams("link", "href", 0));
    linkParams.put("img", new LinkParams("img", "src", 0));

    // remove unwanted link tags from the linkParams map
    String[] ignoreTags = conf.getStrings("parser.html.outlinks.ignore_tags");
    for (int i = 0; ignoreTags != null && i < ignoreTags.length; i++) {
        if (!forceTags.contains(ignoreTags[i]))
            linkParams.remove(ignoreTags[i]);
    }
}

From source file:com.atlantbh.nutch.index.alternativedataflow.conf.AlternativeDataFlowIndexingFilterConfiguration.java

License:Apache License

public static AlternativeDataFlowIndexingFilterConfiguration getInstance(Configuration configuration) {
    try {

        // Get configuration from Nutch /conf folder
        Reader configReader = configuration
                .getConfResourceAsReader(configuration.get(CONFIG_FILE_PATH_PROPERTY));

        // Initialize JAXB
        JAXBContext context = JAXBContext.newInstance(
                new Class[] { AlternativeDataFlowIndexingFilterConfiguration.class, Entry.class, Field.class });
        Unmarshaller unmarshaller = context.createUnmarshaller();

        // Initialize configuration
        AlternativeDataFlowIndexingFilterConfiguration xPathFilterConfiguration = (AlternativeDataFlowIndexingFilterConfiguration) unmarshaller
                .unmarshal(configReader);
        return xPathFilterConfiguration;

    } catch (JAXBException e) {
        log.error("Configuration initialization error!");
    }

    return null;
}

From source file:com.avira.couchdoop.CouchbaseArgs.java

License:Apache License

@Override
public void loadFromHadoopConfiguration(Configuration conf) throws ArgsException {
    String rawUrls = conf.get(ARG_COUCHBASE_URLS.getPropertyName());
    if (rawUrls != null) {
        urls = new ArrayList<>();
        String[] urlStrings = StringUtils.split(conf.get(ARG_COUCHBASE_URLS.getPropertyName()));
        for (String urlString : urlStrings) {
            urls.add(URI.create(urlString));
        }
    }

    bucket = conf.get(ARG_COUCHBASE_BUCKET.getPropertyName());
    password = conf.get(ARG_COUCHBASE_PASSWORD.getPropertyName(), "");
}
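
The example above combines the one-argument get(String), which yields null for a missing key, with the two-argument get(String, String), which substitutes a default instead. A small sketch of the difference, using placeholder key names rather than couchdoop's real property names:

import org.apache.hadoop.conf.Configuration;

public class GetWithDefaultSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Placeholder keys, not the property names defined by CouchbaseArgs.
        String bucket = conf.get("example.couchbase.bucket");          // null when unset
        String password = conf.get("example.couchbase.password", "");  // "" when unset

        System.out.println("bucket=" + bucket + " password='" + password + "'");
    }
}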

From source file:com.avira.couchdoop.exp.ExportArgs.java

License:Apache License

@Override
public void loadFromHadoopConfiguration(Configuration conf) throws ArgsException {
    super.loadFromHadoopConfiguration(conf);

    input = conf.get(ARG_INPUT.getPropertyName());
    operation = getOperation(conf);
    expiry = getExpiry(conf);
    fieldsDelimiter = conf.get(ARG_DELIMITER_FIELDS.getPropertyName(), "\t");
}

From source file:com.avira.couchdoop.exp.ExportArgs.java

License:Apache License

/**
 * Reads the Couchbase store operation from the Hadoop configuration.
 * @return Couchbase store operation to be used
 */
public static CouchbaseOperation getOperation(Configuration hadoopConfiguration) throws ArgsException {
    String strCouchbaseOperation = hadoopConfiguration.get(ARG_OPERATION.getPropertyName());

    // Default value
    if (strCouchbaseOperation == null) {
        return CouchbaseOperation.SET;
    }

    try {
        return CouchbaseOperation.valueOf(strCouchbaseOperation);
    } catch (IllegalArgumentException e) {
        throw new ArgsException("Unrecognized store type '" + strCouchbaseOperation
                + "'. Please provide one of the following: SET, ADD, REPLACE, APPEND, PREPEND and DELETE.", e);
    }
}

From source file:com.avira.couchdoop.exp.ExportArgs.java

License:Apache License

public static int getExpiry(Configuration hadoopConfiguration) throws ArgsException {
    String strExpiry = hadoopConfiguration.get(ARG_EXPIRY.getPropertyName());

    // Default value
    if (strExpiry == null) {
        return 0;
    }

    try {
        return Integer.parseInt(strExpiry);
    } catch (NumberFormatException e) {
        throw new ArgsException(
                "Unrecognized expiry value '" + strExpiry + "'. Please provide a positive integer.", e);
    }
}

From source file:com.avira.couchdoop.imp.ImportViewArgs.java

License:Apache License

@Override
public void loadFromHadoopConfiguration(Configuration conf) throws ArgsException {
    super.loadFromHadoopConfiguration(conf);

    designDocumentName = conf.get(ARG_DESIGNDOC_NAME.getPropertyName());
    viewName = conf.get(ARG_VIEW_NAME.getPropertyName());
    viewKeys = parseViewKeys(conf);
    output = conf.get(ARG_OUTPUT.getPropertyName());
    documentsPerPage = conf.getInt(ARG_DOCS_PER_PAGE.getPropertyName(), 1024);
    //numMappers default to the number of viewKeys
    numMappers = conf.getInt(ARG_NUM_MAPPERS.getPropertyName(), viewKeys.length);
}