List of usage examples for org.apache.hadoop.conf.Configuration.get
public String get(String name)

Parameter: name - the property name.
Returns: the value of the name property, or null if no such property exists.
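As a minimal standalone sketch of that behavior (the property names here are hypothetical and used only for illustration), get(String name) returns the stored value for a key that has been set and null for one that has not; the two-argument overload used in several examples below supplies a fallback instead of null:

import org.apache.hadoop.conf.Configuration;

public class ConfigurationGetExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("example.property", "value1");           // hypothetical key, set for illustration

        String present = conf.get("example.property");    // "value1"
        String missing = conf.get("no.such.property");    // null: the key was never set

        // The two-argument overload returns the given default instead of null.
        String withDefault = conf.get("no.such.property", "default-value");

        System.out.println(present + " / " + missing + " / " + withDefault);
    }
}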
From source file: co.cask.cdap.internal.app.runtime.batch.distributed.MapReduceContainerHelper.java
License: Apache License

/**
 * Returns a list of paths to be used for the MapReduce framework classpath.
 *
 * @param hConf the configuration for the job.
 * @param result a list for appending MR framework classpath
 * @return the same {@code result} list from the argument
 */
public static List<String> getMapReduceClassPath(Configuration hConf, List<String> result) {
    String framework = hConf.get(MRJobConfig.MAPREDUCE_APPLICATION_FRAMEWORK_PATH);

    // For classpath entries read from the hConf, split on both "," and ":" because the conf
    // can be set to something like "path1,path2:path3", which should become
    // "path1:path2:path3" in the target JVM process.
    Splitter splitter = Splitter.on(Pattern.compile(",|" + File.pathSeparatorChar))
        .trimResults()
        .omitEmptyStrings();

    // If no MR framework is specified, use both yarn.application.classpath and
    // mapreduce.application.classpath; otherwise, use only mapreduce.application.classpath.
    if (framework == null) {
        String yarnClassPath = hConf.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
            Joiner.on(",").join(YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH));
        Iterables.addAll(result, splitter.split(yarnClassPath));
    }

    // Add the MR application classpath.
    Iterables.addAll(result, splitter.split(hConf.get(MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH,
        MRJobConfig.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH)));
    return result;
}
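The splitting step above normalizes classpath entries regardless of whether they were joined with "," or the platform path separator. A small standalone sketch of that behavior (the input string is illustrative only), using the same Guava Splitter pattern:

import com.google.common.base.Splitter;
import java.io.File;
import java.util.regex.Pattern;

public class ClasspathSplitExample {
    public static void main(String[] args) {
        Splitter splitter = Splitter.on(Pattern.compile(",|" + File.pathSeparatorChar))
            .trimResults()
            .omitEmptyStrings();
        // On a Unix-like JVM (path separator ':'), this prints path1, path2 and path3
        // on separate lines; blank entries are dropped.
        for (String entry : splitter.split("path1, path2:path3,")) {
            System.out.println(entry);
        }
    }
}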
From source file: co.cask.cdap.internal.app.runtime.batch.distributed.MapReduceContainerHelper.java
License: Apache License

/**
 * Gets the MapReduce framework URI based on the {@code mapreduce.application.framework.path} setting.
 *
 * @param hConf the job configuration
 * @return the framework URI, or {@code null} if not present or if the URI in the config is invalid.
 */
@Nullable
public static URI getFrameworkURI(Configuration hConf) {
    String framework = hConf.get(MRJobConfig.MAPREDUCE_APPLICATION_FRAMEWORK_PATH);
    if (framework == null) {
        return null;
    }

    try {
        // Parse the path. It can contain '#' to represent the localized file name.
        URI uri = new URI(framework);
        String linkName = uri.getFragment();

        // The following resolution logic is copied from JobSubmitter in MR.
        FileSystem fs = FileSystem.get(hConf);
        Path frameworkPath = fs.makeQualified(new Path(uri.getScheme(), uri.getAuthority(), uri.getPath()));
        FileContext fc = FileContext.getFileContext(frameworkPath.toUri(), hConf);
        frameworkPath = fc.resolvePath(frameworkPath);
        uri = frameworkPath.toUri();

        // If there is no localized name (in the URI fragment), use the last part of the URI path as the name.
        if (linkName == null) {
            linkName = uri.getPath();
            int idx = linkName.lastIndexOf('/');
            if (idx >= 0) {
                linkName = linkName.substring(idx + 1);
            }
        }
        return new URI(uri.getScheme(), uri.getAuthority(), uri.getPath(), null, linkName);
    } catch (URISyntaxException e) {
        LOG.warn("Failed to parse {} as a URI. MapReduce framework path is not used. Check the setting for {}.",
            framework, MRJobConfig.MAPREDUCE_APPLICATION_FRAMEWORK_PATH, e);
    } catch (IOException e) {
        LOG.warn("Failed to resolve {} URI. MapReduce framework path is not used. Check the setting for {}.",
            framework, MRJobConfig.MAPREDUCE_APPLICATION_FRAMEWORK_PATH, e);
    }
    return null;
}
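getFrameworkURI follows the standard Hadoop convention for mapreduce.application.framework.path, where an optional '#' fragment names the link under which the framework archive is localized. A hedged sketch of how it might be driven (the HDFS path is hypothetical, and the archive is assumed to exist so the FileContext resolution succeeds):

Configuration hConf = new Configuration();
// Hypothetical archive location; the "#mr-framework" fragment names the localized link.
hConf.set(MRJobConfig.MAPREDUCE_APPLICATION_FRAMEWORK_PATH,
    "hdfs:///mapred/framework/hadoop-mapreduce.tar.gz#mr-framework");

URI frameworkURI = MapReduceContainerHelper.getFrameworkURI(hConf);
// With the value above (and the archive actually present on the file system), the returned
// URI keeps "mr-framework" as its fragment; without a fragment, the last path segment
// ("hadoop-mapreduce.tar.gz") would be used as the link name instead.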
From source file: co.cask.cdap.internal.app.runtime.batch.MapperWrapper.java
License: Apache License

/**
 * Retrieves the class name of the wrapped mapper class from a Job's configuration.
 *
 * @param conf The conf from which to get the wrapped class.
 * @return the class name of the wrapped Mapper class
 */
public static String getWrappedMapper(Configuration conf) {
    String wrappedMapperClassName = conf.get(MapperWrapper.ATTR_MAPPER_CLASS);
    Preconditions.checkNotNull(wrappedMapperClassName, "Wrapped mapper class could not be found.");
    return wrappedMapperClassName;
}
From source file: co.cask.cdap.internal.app.runtime.batch.ReducerWrapper.java
License: Apache License

/**
 * Wraps the reducer defined in the job with this {@link ReducerWrapper} if one is defined.
 *
 * @param job The MapReduce job
 */
public static void wrap(Job job) {
    // NOTE: we don't use job.getReducerClass() because we don't need to load the user class here.
    Configuration conf = job.getConfiguration();
    String reducerClass = conf.get(MRJobConfig.REDUCE_CLASS_ATTR);
    if (reducerClass != null) {
        conf.set(ReducerWrapper.ATTR_REDUCER_CLASS, reducerClass);
        job.setReducerClass(ReducerWrapper.class);
    }
}
From source file: co.cask.cdap.internal.app.runtime.batch.WrapperUtil.java
License: Apache License

static <T> T createDelegate(Configuration conf, String attrClass) {
    String delegateClassName = conf.get(attrClass);
    Class<?> delegateClass = conf.getClassByNameOrNull(delegateClassName);
    Preconditions.checkNotNull(delegateClass, "Class could not be found: ", delegateClassName);
    T delegate = (T) ReflectionUtils.newInstance(delegateClass, conf);

    if (!(delegate instanceof ProgramLifecycle)) {
        return delegate;
    }

    MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(conf);
    BasicMapReduceTaskContext basicMapReduceContext = classLoader.getTaskContextProvider().get(conf);
    ClassLoader programClassLoader = classLoader.getProgramClassLoader();
    ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(programClassLoader);
    try {
        ProgramLifecycle programLifecycle = (ProgramLifecycle) delegate;
        programLifecycle.initialize(new MapReduceLifecycleContext(basicMapReduceContext));
        // Register it so that its destroy method gets called when the BasicMapReduceTaskContext is closed.
        basicMapReduceContext.registerProgramLifecycle(programLifecycle);
        return delegate;
    } catch (Exception e) {
        LOG.error("Failed to initialize delegate with {}", basicMapReduceContext, e);
        throw Throwables.propagate(e);
    } finally {
        ClassLoaders.setContextClassLoader(oldClassLoader);
    }
}
From source file: co.cask.cdap.internal.app.runtime.batch.WrapperUtil.java
License: Apache License

static boolean setIfDefined(Job job, String srcKey, String destinationKey) {
    // NOTE: we don't use job.getXClass or conf.getClass because we don't need to load the user class here.
    Configuration conf = job.getConfiguration();
    String srcVal = conf.get(srcKey);
    if (srcVal != null) {
        conf.set(destinationKey, srcVal);
        return true;
    }
    return false;
}
From source file: co.cask.cdap.internal.app.runtime.spark.dataset.SparkDatasetInputFormat.java
License: Apache License

@Override
public List<InputSplit> getSplits(final JobContext context) throws IOException, InterruptedException {
    ExecutionSparkContext sparkContext = SparkContextProvider.getSparkContext();
    Configuration configuration = context.getConfiguration();
    Map<String, String> arguments = GSON.fromJson(configuration.get(INPUT_DATASET_ARGS), ARGS_TYPE);
    BatchReadable<?, ?> batchReadable = sparkContext.getBatchReadable(configuration.get(INPUT_DATASET_NAME),
        arguments);

    List<Split> splits = batchReadable.getSplits();
    List<InputSplit> list = new ArrayList<>(splits.size());
    for (Split split : splits) {
        list.add(new DataSetInputSplit(split));
    }
    return list;
}
From source file: co.cask.cdap.internal.app.runtime.spark.dataset.SparkDatasetInputFormat.java
License: Apache License

private BatchReadable<KEY, VALUE> getBatchReadable(Configuration configuration) {
    Map<String, String> args = GSON.fromJson(configuration.get(INPUT_DATASET_ARGS), ARGS_TYPE);
    return SparkContextProvider.getSparkContext().getBatchReadable(configuration.get(INPUT_DATASET_NAME), args);
}
From source file: co.cask.cdap.internal.app.runtime.spark.dataset.SparkDatasetOutputFormat.java
License: Apache License

private <K, V> CloseableBatchWritable<K, V> getBatchWritable(Configuration configuration) {
    Map<String, String> args = GSON.fromJson(configuration.get(OUTPUT_DATASET_ARGS), ARGS_TYPE);
    return SparkContextProvider.getSparkContext().getBatchWritable(configuration.get(OUTPUT_DATASET_NAME), args);
}
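The two Spark dataset formats above read a dataset name and a JSON-encoded argument map back out of the job configuration with conf.get. A standalone sketch of that round trip (using a hypothetical property key and plain Gson, not the CDAP constants themselves):

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import org.apache.hadoop.conf.Configuration;
import java.lang.reflect.Type;
import java.util.Collections;
import java.util.Map;

public class DatasetArgsRoundTrip {
    private static final Gson GSON = new Gson();
    private static final Type ARGS_TYPE = new TypeToken<Map<String, String>>() { }.getType();

    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Write side: store the argument map as JSON under a hypothetical key.
        conf.set("example.dataset.args", GSON.toJson(Collections.singletonMap("ttl", "3600")));

        // Read side: conf.get returns the JSON string, which Gson turns back into a Map.
        Map<String, String> restored = GSON.fromJson(conf.get("example.dataset.args"), ARGS_TYPE);
        System.out.println(restored.get("ttl"));   // prints 3600
    }
}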
From source file: co.cask.cdap.metrics.data.MetricsTestHelper.java
License: Apache License

public static MetricsTableFactory createHBaseMetricsTableFactory(Configuration hConf)
        throws DatasetManagementException {
    CConfiguration cConf = CConfiguration.create();
    String zkConnectStr = hConf.get(HConstants.ZOOKEEPER_QUORUM) + ":"
        + hConf.get(HConstants.ZOOKEEPER_CLIENT_PORT);
    cConf.set(Constants.Zookeeper.QUORUM, zkConnectStr);
    cConf.set(MetricsConstants.ConfigKeys.TIME_SERIES_TABLE_ROLL_TIME, "300");
    cConf.set(Constants.CFG_HDFS_USER, System.getProperty("user.name"));

    Injector injector = Guice.createInjector(new ConfigModule(cConf, hConf),
        new DiscoveryRuntimeModule().getDistributedModules(), new ZKClientModule(),
        new LocationRuntimeModule().getDistributedModules(), new DataFabricDistributedModule(),
        new TransactionMetricsModule(), new AbstractModule() {
            @Override
            protected void configure() {
                install(new FactoryModuleBuilder()
                    .implement(DatasetDefinitionRegistry.class, DefaultDatasetDefinitionRegistry.class)
                    .build(DatasetDefinitionRegistryFactory.class));
            }
        });

    DatasetFramework dsFramework = new InMemoryDatasetFramework(
        injector.getInstance(DatasetDefinitionRegistryFactory.class));
    dsFramework.addModule("metrics-hbase", new HBaseMetricsTableModule());
    return new DefaultMetricsTableFactory(cConf, dsFramework);
}