List of usage examples for org.apache.hadoop.conf.Configuration.get
public String get(String name)

Get the value of the name property, or null if no such property exists.

Parameter: name - the property name.
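Before the collected examples, a minimal sketch of the call itself, assuming the default resources (core-default.xml / core-site.xml) are on the classpath; fs.defaultFS is just a convenient built-in property, not drawn from the examples below:

import org.apache.hadoop.conf.Configuration;

public class GetExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // get() returns the resolved value, or null when the property is undefined
        String fsUri = conf.get("fs.defaultFS");
        if (fsUri == null) {
            System.out.println("fs.defaultFS is not set");
        } else {
            System.out.println("fs.defaultFS = " + fsUri);
        }
    }
}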
From source file: com.aliyun.openservices.tablestore.hive.TableStoreInputFormat.java
License:Apache License
private static Configuration translateConfig(Configuration from) {
    Configuration to = new Configuration();
    {
        com.aliyun.openservices.tablestore.hadoop.Credential cred =
                new com.aliyun.openservices.tablestore.hadoop.Credential(
                        from.get(TableStoreConsts.ACCESS_KEY_ID),
                        from.get(TableStoreConsts.ACCESS_KEY_SECRET),
                        from.get(TableStoreConsts.SECURITY_TOKEN));
        TableStore.setCredential(to, cred);
    }
    {
        String endpoint = from.get(TableStoreConsts.ENDPOINT);
        String instance = from.get(TableStoreConsts.INSTANCE);
        com.aliyun.openservices.tablestore.hadoop.Endpoint ep;
        if (instance == null) {
            ep = new com.aliyun.openservices.tablestore.hadoop.Endpoint(endpoint);
        } else {
            ep = new com.aliyun.openservices.tablestore.hadoop.Endpoint(endpoint, instance);
        }
        TableStore.setEndpoint(to, ep);
    }
    return to;
}
From source file:com.aliyun.openservices.tablestore.hive.TableStoreInputFormat.java
License:Apache License
private boolean isHiveConfiguration(Configuration conf) {
    String endpoint = conf.get(TableStoreConsts.ENDPOINT);
    return endpoint != null;
}
From source file:com.aliyun.openservices.tablestore.hive.TableStoreOutputFormat.java
License:Apache License
private static Configuration translateConfig(Configuration from) {
    Configuration to = new Configuration();
    {
        Credential cred = new Credential(
                from.get(TableStoreConsts.ACCESS_KEY_ID),
                from.get(TableStoreConsts.ACCESS_KEY_SECRET),
                from.get(TableStoreConsts.SECURITY_TOKEN));
        TableStore.setCredential(to, cred);
    }
    {
        String endpoint = from.get(TableStoreConsts.ENDPOINT);
        String instance = from.get(TableStoreConsts.INSTANCE);
        Endpoint ep;
        if (instance == null) {
            ep = new Endpoint(endpoint);
        } else {
            ep = new Endpoint(endpoint, instance);
        }
        TableStore.setEndpoint(to, ep);
    }
    {
        com.aliyun.openservices.tablestore.hadoop.TableStoreOutputFormat.setOutputTable(to,
                from.get(TableStoreConsts.TABLE_NAME));
    }
    return to;
}
From source file:com.ambiata.ivory.operation.hadoop.MultipleInputs.java
License:Apache License
/**
 * Add a {@link Path} with a custom {@link InputFormat} to the list of
 * inputs for the map-reduce job.
 *
 * @param job The {@link Job}
 * @param path {@link Path} to be added to the list of inputs for the job
 * @param inputFormatClass {@link InputFormat} class to use for this path
 */
@SuppressWarnings("unchecked")
public static void addInputPath(Job job, Path path, Class<? extends InputFormat> inputFormatClass) {
    /* WAS not encoded */
    String inputFormatMapping = encode(path.toString() + ";" + inputFormatClass.getName());
    Configuration conf = job.getConfiguration();
    String inputFormats = conf.get(DIR_FORMATS);
    conf.set(DIR_FORMATS,
            inputFormats == null ? inputFormatMapping : inputFormats + "," + inputFormatMapping);
    job.setInputFormatClass(DelegatingInputFormat.class);
}
From source file:com.ambiata.ivory.operation.hadoop.MultipleInputs.java
License:Apache License
/**
 * Add a {@link Path} with a custom {@link InputFormat} and
 * {@link Mapper} to the list of inputs for the map-reduce job.
 *
 * @param job The {@link Job}
 * @param path {@link Path} to be added to the list of inputs for the job
 * @param inputFormatClass {@link InputFormat} class to use for this path
 * @param mapperClass {@link Mapper} class to use for this path
 */
@SuppressWarnings("unchecked")
public static void addInputPath(Job job, Path path, Class<? extends InputFormat> inputFormatClass,
        Class<? extends Mapper> mapperClass) {
    addInputPath(job, path, inputFormatClass);
    Configuration conf = job.getConfiguration();
    /* WAS not encoded */
    String mapperMapping = encode(path.toString() + ";" + mapperClass.getName());
    String mappers = conf.get(DIR_MAPPERS);
    conf.set(DIR_MAPPERS, mappers == null ? mapperMapping : mappers + "," + mapperMapping);
    job.setMapperClass(DelegatingMapper.class);
}
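For context, a hypothetical driver snippet wiring two inputs through these helpers; the paths and the two stub mappers are stand-ins chosen for illustration, not taken from the Ivory source:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

public class MultiInputDriver {
    // Trivial stand-in mappers so the snippet compiles
    static class EventMapper extends Mapper<LongWritable, Text, Text, Text> {}
    static class UserMapper extends Mapper<LongWritable, Text, Text, Text> {}

    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "multi-input example");
        MultipleInputs.addInputPath(job, new Path("/data/events"), TextInputFormat.class, EventMapper.class);
        MultipleInputs.addInputPath(job, new Path("/data/users"), TextInputFormat.class, UserMapper.class);
        // Both mappings now sit comma-separated (and encoded) under DIR_FORMATS / DIR_MAPPERS.
    }
}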
From source file:com.ambiata.ivory.operation.hadoop.MultipleInputs.java
License:Apache License
/**
 * Retrieves a map of {@link Path}s to the {@link InputFormat} class
 * that should be used for them.
 *
 * @param job The {@link JobContext}
 * @see #addInputPath(JobConf, Path, Class)
 * @return A map of paths to inputformats for the job
 */
@SuppressWarnings("unchecked")
static Map<Path, InputFormat> getInputFormatMap(JobContext job) {
    Map<Path, InputFormat> m = new HashMap<Path, InputFormat>();
    Configuration conf = job.getConfiguration();
    String[] pathMappings = conf.get(DIR_FORMATS).split(",");
    for (String pathMappingEncoded : pathMappings) {
        /* WAS not decoded */
        String pathMapping = decode(pathMappingEncoded);
        String[] split = pathMapping.split(";");
        InputFormat inputFormat;
        try {
            inputFormat = (InputFormat) ReflectionUtils.newInstance(conf.getClassByName(split[1]), conf);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
        m.put(new Path(split[0]), inputFormat);
    }
    return m;
}
From source file:com.ambiata.ivory.operation.hadoop.MultipleInputs.java
License:Apache License
/**
 * Retrieves a map of {@link Path}s to the {@link Mapper} class that
 * should be used for them.
 *
 * @param job The {@link JobContext}
 * @see #addInputPath(JobConf, Path, Class, Class)
 * @return A map of paths to mappers for the job
 */
@SuppressWarnings("unchecked")
static Map<Path, Class<? extends Mapper>> getMapperTypeMap(JobContext job) {
    Configuration conf = job.getConfiguration();
    if (conf.get(DIR_MAPPERS) == null) {
        return Collections.emptyMap();
    }
    Map<Path, Class<? extends Mapper>> m = new HashMap<Path, Class<? extends Mapper>>();
    String[] pathMappings = conf.get(DIR_MAPPERS).split(",");
    for (String pathMappingEncoded : pathMappings) {
        /* WAS not decoded */
        String pathMapping = decode(pathMappingEncoded);
        String[] split = pathMapping.split(";");
        Class<? extends Mapper> mapClass;
        try {
            mapClass = (Class<? extends Mapper>) conf.getClassByName(split[1]);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
        m.put(new Path(split[0]), mapClass);
    }
    return m;
}
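The encode/decode helpers called above are not shown on this page. One plausible implementation, consistent with the "WAS not encoded" notes (the point being to survive paths that contain ';' or ','), is URL-encoding; this is an assumption about the Ivory code, not a quote from it:

// Assumed helpers: escape ';' and ',' so the delimiters used above stay unambiguous
private static String encode(String s) {
    try {
        return java.net.URLEncoder.encode(s, "UTF-8");
    } catch (java.io.UnsupportedEncodingException e) {
        throw new RuntimeException(e); // UTF-8 is always available
    }
}

private static String decode(String s) {
    try {
        return java.net.URLDecoder.decode(s, "UTF-8");
    } catch (java.io.UnsupportedEncodingException e) {
        throw new RuntimeException(e);
    }
}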
From source file:com.amintor.hdfs.client.kerberizedhdfsclient.KerberizedHDFSClient.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    try {
        Configuration conf = new Configuration();
        conf.addResource(new FileInputStream(HDFS_SITE_LOCATION));
        conf.addResource(new FileInputStream(CORE_SITE_LOCATION));
        String authType = conf.get("hadoop.security.authentication");
        System.out.println("Authentication Type:" + authType);
        // get() returns null when the property is absent, so guard before trimming
        if (authType != null && authType.trim().equalsIgnoreCase("kerberos")) {
            // Login through UGI keytab
            UserGroupInformation.setConfiguration(conf);
            UserGroupInformation.loginUserFromKeytab("vijay", "/Users/vsingh/Software/vijay.keytab");
            FileSystem hdFS = FileSystem.get(conf);
            FileStatus[] listStatus = hdFS.listStatus(new Path(args[0]));
            for (FileStatus statusFile : listStatus) {
                System.out.print("Replication:" + statusFile.getReplication() + "\t");
                System.out.print("Owner:" + statusFile.getOwner() + "\t");
                System.out.print("Group:" + statusFile.getGroup() + "\t");
                System.out.println("Path:" + statusFile.getPath() + "\t");
            }
        }
    } catch (IOException ex) {
        Logger.getLogger(KerberizedHDFSClient.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:com.antsdb.saltedfish.storage.HBaseUtilMain.java
License:Open Source License
private void connectUseConfig(String optionValue) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    conf.addResource(new Path(optionValue));
    println("Connecting to server %s ...", conf.get("hbase.zookeeper.quorum"));
    this.conn = ConnectionFactory.createConnection(conf);
}
From source file:com.armon.test.quartz.QuartzConfiguration.java
License:Apache License
/**
 * Get the value of the <code>name</code> property as an <code>int</code>, possibly
 * referring to the deprecated name of the configuration property.
 * If no such property exists, the provided default value is returned,
 * or if the specified value is not a valid <code>int</code>,
 * then an error is thrown.
 *
 * @param name property name.
 * @param deprecatedName a deprecated name for the property to use
 *        if the non-deprecated name is not set
 * @param defaultValue default value.
 * @throws NumberFormatException when the value is invalid
 * @return property value as an <code>int</code>, or <code>defaultValue</code>.
 */
// TODO: developer note: This duplicates the functionality of deprecated
// property support in Configuration in Hadoop 2. But since Hadoop 1 does not
// contain these changes, we will do our own as usual. Replace these when H2 is default.
public static int getInt(Configuration conf, String name, String deprecatedName, int defaultValue) {
    if (conf.get(deprecatedName) != null) {
        LOG.warn(String.format("Config option \"%s\" is deprecated. Instead, use \"%s\"",
                deprecatedName, name));
        return conf.getInt(deprecatedName, defaultValue);
    } else {
        return conf.getInt(name, defaultValue);
    }
}
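A short usage sketch for this fallback helper; both property names are hypothetical, chosen only to show the deprecation branch firing:

Configuration conf = new Configuration();
conf.set("quartz.pool.size.old", "8");  // hypothetical deprecated key, set so the warn branch runs
int poolSize = QuartzConfiguration.getInt(conf, "quartz.pool.size", "quartz.pool.size.old", 4);
// Logs a deprecation warning and returns 8; with neither key set it would return 4.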