List of usage examples for org.apache.hadoop.conf.Configuration#getClassByNameOrNull
public Class<?> getClassByNameOrNull(String name)
From source file:co.cask.cdap.internal.app.runtime.batch.dataset.input.DelegatingInputFormat.java
License:Apache License
@SuppressWarnings("unchecked") public List<InputSplit> getSplits(JobContext job) throws IOException, InterruptedException { List<InputSplit> splits = new ArrayList<>(); Map<String, MultipleInputs.MapperInput> mapperInputMap = MultipleInputs.getInputMap(job.getConfiguration()); for (Map.Entry<String, MultipleInputs.MapperInput> mapperInputEntry : mapperInputMap.entrySet()) { String inputName = mapperInputEntry.getKey(); MultipleInputs.MapperInput mapperInput = mapperInputEntry.getValue(); String mapperClassName = mapperInput.getMapperClassName(); Job jobCopy = new Job(job.getConfiguration()); Configuration confCopy = jobCopy.getConfiguration(); // set configuration specific for this input onto the jobCopy ConfigurationUtil.setAll(mapperInput.getInputFormatConfiguration(), confCopy); Class<?> inputFormatClass = confCopy.getClassByNameOrNull(mapperInput.getInputFormatClassName()); Preconditions.checkNotNull(inputFormatClass, "Class could not be found: ", mapperInput.getInputFormatClassName()); InputFormat inputFormat = (InputFormat) ReflectionUtils.newInstance(inputFormatClass, confCopy); // Get splits for each input path and tag with InputFormat // and Mapper types by wrapping in a TaggedInputSplit. List<InputSplit> formatSplits = inputFormat.getSplits(jobCopy); for (InputSplit split : formatSplits) { splits.add(new TaggedInputSplit(inputName, split, confCopy, mapperInput.getInputFormatConfiguration(), inputFormat.getClass(), mapperClassName)); }//from w w w . jav a2 s. c om } return splits; }
From source file:co.cask.cdap.internal.app.runtime.batch.dataset.input.MultiInputFormat.java
License:Apache License
@SuppressWarnings("unchecked") public List<InputSplit> getSplits(JobContext job) throws IOException, InterruptedException { List<InputSplit> splits = new ArrayList<>(); Map<String, MultipleInputs.MapperInput> mapperInputMap = MultipleInputs.getInputMap(job.getConfiguration()); for (Map.Entry<String, MultipleInputs.MapperInput> mapperInputEntry : mapperInputMap.entrySet()) { String inputName = mapperInputEntry.getKey(); MultipleInputs.MapperInput mapperInput = mapperInputEntry.getValue(); String mapperClassName = mapperInput.getMapperClassName(); Job jobCopy = new Job(job.getConfiguration()); Configuration confCopy = jobCopy.getConfiguration(); // set configuration specific for this input onto the jobCopy ConfigurationUtil.setAll(mapperInput.getInputFormatConfiguration(), confCopy); Class<?> inputFormatClass = confCopy.getClassByNameOrNull(mapperInput.getInputFormatClassName()); Preconditions.checkNotNull(inputFormatClass, "Class could not be found: ", mapperInput.getInputFormatClassName()); InputFormat<K, V> inputFormat = (InputFormat) ReflectionUtils.newInstance(inputFormatClass, confCopy); //some input format need a jobId to getSplits jobCopy.setJobID(new JobID(inputName, inputName.hashCode())); // Get splits for each input path and tag with InputFormat // and Mapper types by wrapping in a MultiInputTaggedSplit. List<InputSplit> formatSplits = inputFormat.getSplits(jobCopy); for (InputSplit split : formatSplits) { splits.add(new MultiInputTaggedSplit(split, confCopy, inputName, mapperInput.getInputFormatConfiguration(), inputFormat.getClass(), mapperClassName)); }//from w ww . j ava 2 s .c o m } return splits; }
From source file:co.cask.cdap.internal.app.runtime.batch.WrapperUtil.java
License:Apache License
static <T> T createDelegate(Configuration conf, String attrClass) { String delegateClassName = conf.get(attrClass); Class<?> delegateClass = conf.getClassByNameOrNull(delegateClassName); Preconditions.checkNotNull(delegateClass, "Class could not be found: ", delegateClassName); T delegate = (T) ReflectionUtils.newInstance(delegateClass, conf); if (!(delegate instanceof ProgramLifecycle)) { return delegate; }/*from www. j a v a2 s .c om*/ MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(conf); BasicMapReduceTaskContext basicMapReduceContext = classLoader.getTaskContextProvider().get(conf); ClassLoader programClassLoader = classLoader.getProgramClassLoader(); ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(programClassLoader); try { ProgramLifecycle programLifecycle = (ProgramLifecycle) delegate; programLifecycle.initialize(new MapReduceLifecycleContext(basicMapReduceContext)); // register it so that its destroy method can get called when the BasicMapReduceTaskContext is closed basicMapReduceContext.registerProgramLifecycle(programLifecycle); return delegate; } catch (Exception e) { LOG.error("Failed to initialize delegate with {}", basicMapReduceContext, e); throw Throwables.propagate(e); } finally { ClassLoaders.setContextClassLoader(oldClassLoader); } }
From source file:org.apache.nutch.net.protocols.ProtocolLogUtil.java
License:Apache License
/**
 * Stores the configuration and builds the set of exception classes whose stack traces
 * should be suppressed (logged short) — taken from the {@code HTTP_LOG_SUPPRESSION}
 * setting, defaulting to the common unreachable-host exceptions. Entries that cannot
 * be resolved, or that are not Throwable subclasses, are skipped with a warning.
 */
@Override
public void setConf(Configuration conf) {
    config = conf;
    String[] suppressedNames = conf.getTrimmedStrings(HTTP_LOG_SUPPRESSION,
            "java.net.UnknownHostException", "java.net.NoRouteToHostException");
    for (String suppressedName : suppressedNames) {
        Class<?> resolved = conf.getClassByNameOrNull(suppressedName);
        if (resolved == null) {
            // Not on the classpath — warn and skip.
            LOG.warn("Class {} configured for log stack suppression not found.", suppressedName);
        } else if (!Throwable.class.isAssignableFrom(resolved)) {
            // Present but not an exception type — warn and skip.
            LOG.warn("Class {} configured for log stack suppression does not extend Throwable.",
                    suppressedName);
        } else {
            exceptionsLogShort.add(resolved.asSubclass(Throwable.class));
        }
    }
}