List of usage examples for org.apache.hadoop.conf.Configuration#getClassByName
public Class<?> getClassByName(String name) throws ClassNotFoundException
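Every example below follows the same basic pattern: read a fully qualified class name out of the job Configuration, resolve it with getClassByName(...) (which resolves through the configuration's class loader), and then either keep the Class object or instantiate it through ReflectionUtils. A minimal self-contained sketch of that pattern; the configuration key my.example.codec.class is a made-up placeholder:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;

public class GetClassByNameExample {
    public static void main(String[] args) throws ClassNotFoundException {
        Configuration conf = new Configuration();
        // Hypothetical key; any property holding a fully qualified class name works.
        conf.set("my.example.codec.class", "org.apache.hadoop.io.compress.GzipCodec");

        // Resolve the class through the configuration's class loader.
        Class<?> clazz = conf.getClassByName(conf.get("my.example.codec.class"));

        // Instantiate it; ReflectionUtils injects the Configuration if the class is Configurable.
        Object codec = ReflectionUtils.newInstance(clazz, conf);
        System.out.println(codec.getClass().getName());
    }
}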
From source file: com.ambiata.ivory.operation.hadoop.MultipleInputs.java
License: Apache License

/**
 * Retrieves a map of {@link Path}s to the {@link InputFormat} class
 * that should be used for them.
 *
 * @param job The {@link JobContext}
 * @see #addInputPath(JobConf, Path, Class)
 * @return A map of paths to inputformats for the job
 */
@SuppressWarnings("unchecked")
static Map<Path, InputFormat> getInputFormatMap(JobContext job) {
    Map<Path, InputFormat> m = new HashMap<Path, InputFormat>();
    Configuration conf = job.getConfiguration();
    String[] pathMappings = conf.get(DIR_FORMATS).split(",");
    for (String pathMappingEncoded : pathMappings) {
        /* WAS not decoded */
        String pathMapping = decode(pathMappingEncoded);
        String[] split = pathMapping.split(";");
        InputFormat inputFormat;
        try {
            inputFormat = (InputFormat) ReflectionUtils.newInstance(conf.getClassByName(split[1]), conf);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
        m.put(new Path(split[0]), inputFormat);
    }
    return m;
}
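The DIR_FORMATS value parsed here is a comma-separated list of URL-encoded path;formatClass pairs, which is why each entry goes through decode(...) first (the "WAS not decoded" comments mark where this fork diverges from stock Hadoop). A hypothetical sketch of the matching writer side, assuming encode(...) is plain URL-encoding; the real addInputPath and encode helpers are not shown in the source:

// Hypothetical writer inferred from the decoder above. URL-encoding protects the
// outer ','-separated list from commas inside paths; a ';' inside a path would
// still break the inner split.
private static void addInputPath(Configuration conf, Path path, Class<?> inputFormatClass)
        throws UnsupportedEncodingException {
    String mapping = URLEncoder.encode(path.toString() + ";" + inputFormatClass.getName(), "UTF-8");
    String existing = conf.get(DIR_FORMATS);
    conf.set(DIR_FORMATS, existing == null ? mapping : existing + "," + mapping);
}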
From source file: com.ambiata.ivory.operation.hadoop.MultipleInputs.java
License: Apache License

/**
 * Retrieves a map of {@link Path}s to the {@link Mapper} class that
 * should be used for them.
 *
 * @param job The {@link JobContext}
 * @see #addInputPath(JobConf, Path, Class, Class)
 * @return A map of paths to mappers for the job
 */
@SuppressWarnings("unchecked")
static Map<Path, Class<? extends Mapper>> getMapperTypeMap(JobContext job) {
    Configuration conf = job.getConfiguration();
    if (conf.get(DIR_MAPPERS) == null) {
        return Collections.emptyMap();
    }
    Map<Path, Class<? extends Mapper>> m = new HashMap<Path, Class<? extends Mapper>>();
    String[] pathMappings = conf.get(DIR_MAPPERS).split(",");
    for (String pathMappingEncoded : pathMappings) {
        /* WAS not decoded */
        String pathMapping = decode(pathMappingEncoded);
        String[] split = pathMapping.split(";");
        Class<? extends Mapper> mapClass;
        try {
            mapClass = (Class<? extends Mapper>) conf.getClassByName(split[1]);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
        m.put(new Path(split[0]), mapClass);
    }
    return m;
}
From source file: com.asakusafw.bridge.hadoop.directio.DirectFileInputFormat.java
License: Apache License

@SuppressWarnings("unchecked")
private static <T> Class<T> extractClass(Configuration conf, String key, boolean mandatory) {
    String value = extract(conf, key, mandatory, false);
    if (value == null) {
        return null;
    }
    try {
        return (Class<T>) conf.getClassByName(value);
    } catch (ClassNotFoundException e) {
        throw new IllegalStateException(
                MessageFormat.format("failed to resolve a class: {0}={1}", key, value), e);
    }
}
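A hypothetical caller sketch (inside the same class, since extractClass is private). It leans on the method's contract: null for an absent optional key, IllegalStateException for an unresolvable name. The key name below is a placeholder:

// Hypothetical usage; "com.example.directio.format" is a made-up key.
Class<?> formatClass = extractClass(conf, "com.example.directio.format", true);
if (formatClass != null) {
    Object format = ReflectionUtils.newInstance(formatClass, conf);
    // ... hand the instance to the input format ...
}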
From source file: com.asakusafw.bridge.hadoop.directio.Util.java
License: Apache License

static DataDefinition<?> readDataDefinition(DataInput in, Map<String, String> batchArguments, Configuration conf)
        throws IOException {
    Class<?> data;
    DataFormat<?> format;
    DataFilter<?> filter;
    try {
        data = conf.getClassByName(Text.readString(in));
        format = (DataFormat<?>) ReflectionUtils.newInstance(conf.getClassByName(Text.readString(in)), conf);
        String filterClass = Text.readString(in);
        if (filterClass.isEmpty()) {
            filter = null;
        } else {
            filter = createFilter(conf.getClassByName(filterClass), batchArguments, conf);
        }
    } catch (ReflectiveOperationException e) {
        throw new IOException("error occurred while extracting data definition", e);
    }
    return SimpleDataDefinition.newInstance(data, format, filter);
}
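A hypothetical sketch of the writer this reader implies: three class names written in order with Text.writeString, the empty string marking an absent filter. The writer and the accessor names on DataDefinition below are assumptions, not taken from the source:

// Hypothetical writer matching readDataDefinition above; accessor names are assumed.
static void writeDataDefinition(DataOutput out, DataDefinition<?> definition) throws IOException {
    Text.writeString(out, definition.getDataClass().getName());
    Text.writeString(out, definition.getDataFormat().getClass().getName());
    DataFilter<?> filter = definition.getDataFilter();
    // An empty string stands in for "no filter", mirroring the isEmpty() check in the reader.
    Text.writeString(out, filter == null ? "" : filter.getClass().getName());
}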
From source file: com.asakusafw.runtime.directio.hadoop.HadoopDataSourceUtil.java
License: Apache License

/**
 * Loads a profile list from the configuration.
 * @param conf target configuration
 * @return the restored profile list
 * @throws IllegalArgumentException if some parameters were {@code null}
 */
public static List<DirectDataSourceProfile> loadProfiles(Configuration conf) {
    if (conf == null) {
        throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$
    }
    Map<String, String> pathToKey = new HashMap<>();
    Map<String, String> map = getConfigMap(conf);
    Set<String> keys = getChildKeys(map, "."); //$NON-NLS-1$
    try {
        List<DirectDataSourceProfile> results = new ArrayList<>();
        for (String key : keys) {
            String className = map.get(key);
            Map<String, String> config = createPrefixMap(map, key + "."); //$NON-NLS-1$
            String path = config.remove(KEY_PATH);
            if (path == null) {
                throw new IllegalStateException(MessageFormat.format(
                        "Missing I/O configuration: {0}",
                        PREFIX + key + '.' + KEY_PATH));
            }
            path = normalizePath(path);
            if (pathToKey.containsKey(path)) {
                throw new IllegalStateException(MessageFormat.format(
                        "Path mapping is duplicated: {0} ({1} <=> {2})",
                        path.isEmpty() ? "/" : path, //$NON-NLS-1$
                        PREFIX + key + '.' + KEY_PATH,
                        PREFIX + pathToKey.get(path) + '.' + KEY_PATH));
            } else {
                pathToKey.put(path, key);
            }
            Class<? extends AbstractDirectDataSource> aClass = conf.getClassByName(className)
                    .asSubclass(AbstractDirectDataSource.class);
            results.add(new DirectDataSourceProfile(key, aClass, path, config));
        }
        return results;
    } catch (ClassNotFoundException e) {
        throw new IllegalStateException(e);
    }
}
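The configuration layout loadProfiles expects: under the Direct I/O prefix, each data source ID maps to its implementation class, and a mandatory <id>.path child key gives its logical path, which must be unique across data sources. A hypothetical sketch; the literal "com.asakusafw.directio" prefix and the HadoopDataSource class name stand in for the PREFIX constant and a concrete data source, neither of which is shown above:

// Hypothetical registration of two data sources; prefix and class name are assumed.
Configuration conf = new Configuration();
conf.set("com.asakusafw.directio.root", "com.asakusafw.runtime.directio.hadoop.HadoopDataSource");
conf.set("com.asakusafw.directio.root.path", "/");
conf.set("com.asakusafw.directio.tmp", "com.asakusafw.runtime.directio.hadoop.HadoopDataSource");
conf.set("com.asakusafw.directio.tmp.path", "tmp"); // duplicating a path would fail fast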
From source file: com.asakusafw.runtime.stage.input.StageInputDriver.java
License: Apache License

@SuppressWarnings("rawtypes")
private static List<StageInput> decode(Configuration conf, String encoded)
        throws IOException, ClassNotFoundException {
    assert conf != null;
    assert encoded != null;
    ByteArrayInputStream source = new ByteArrayInputStream(encoded.getBytes(ASCII));
    DataInputStream input = new DataInputStream(new GZIPInputStream(new Base64InputStream(source)));
    long version = WritableUtils.readVLong(input);
    if (version != SERIAL_VERSION) {
        throw new IOException(MessageFormat.format(
                "Invalid StageInput version: framework={0}, saw={1}", SERIAL_VERSION, version));
    }
    String[] dictionary = WritableUtils.readStringArray(input);
    int inputListSize = WritableUtils.readVInt(input);
    List<StageInput> results = new ArrayList<>();
    for (int inputListIndex = 0; inputListIndex < inputListSize; inputListIndex++) {
        String pathString = readEncoded(input, dictionary);
        String formatName = readEncoded(input, dictionary);
        String mapperName = readEncoded(input, dictionary);
        int attributeCount = WritableUtils.readVInt(input);
        Map<String, String> attributes = new HashMap<>();
        for (int attributeIndex = 0; attributeIndex < attributeCount; attributeIndex++) {
            String keyString = readEncoded(input, dictionary);
            String valueString = readEncoded(input, dictionary);
            attributes.put(keyString, valueString);
        }
        Class<? extends InputFormat> formatClass = conf.getClassByName(formatName)
                .asSubclass(InputFormat.class);
        Class<? extends Mapper> mapperClass = conf.getClassByName(mapperName).asSubclass(Mapper.class);
        results.add(new StageInput(pathString, formatClass, mapperClass, attributes));
    }
    return results;
}
From source file: com.asakusafw.runtime.stage.input.StageInputFormat.java
License: Apache License

/**
 * Returns the {@link SplitCombiner} class used in the current job.
 * @param context the current job context
 * @return the {@link SplitCombiner} class
 * @since 0.7.1
 */
public static Class<? extends SplitCombiner> getSplitCombinerClass(JobContext context) {
    if (context == null) {
        throw new IllegalArgumentException("context must not be null"); //$NON-NLS-1$
    }
    Configuration conf = context.getConfiguration();
    String combinerType = conf.get(KEY_SPLIT_COMBINER, DEFAULT_SPLIT_COMBINER);
    if (JobCompatibility.isLocalMode(context) && combinerType.equals(DEFAULT_SPLIT_COMBINER)) {
        return ExtremeSplitCombiner.class;
    }
    Class<? extends SplitCombiner> defined = SPLIT_COMBINERS.get(combinerType);
    if (defined != null) {
        return defined;
    }
    try {
        return conf.getClassByName(combinerType).asSubclass(SplitCombiner.class);
    } catch (Exception e) {
        LOG.warn(MessageFormat.format("failed to load a combiner \"{0}\"", combinerType), e);
        return IdentitySplitCombiner.class;
    }
}
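Selecting a combiner is then a one-line configuration change, using either a registered alias from SPLIT_COMBINERS or a fully qualified class name. A hypothetical sketch; the literal key below is a placeholder for KEY_SPLIT_COMBINER, whose value is not shown above:

// Placeholder key; the real KEY_SPLIT_COMBINER constant lives in StageInputFormat.
conf.set("com.asakusafw.input.combine", "com.example.MySplitCombiner");
// An unresolvable name is logged and falls back to IdentitySplitCombiner instead of failing the job.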
From source file: com.baynote.kafka.hadoop.MultipleKafkaInputFormat.java
License: Apache License

/**
 * Returns a {@link List} containing <em>all</em> of the topic-group-{@link Mapper} combinations added via
 * {@link #addTopic(Job, String, String, Class)}.
 *
 * @param conf
 *            the conf for this job.
 * @return all of the configured {@link TopicConf}s
 */
@SuppressWarnings("unchecked")
public static List<TopicConf> getTopics(final Configuration conf) {
    final List<TopicConf> result = Lists.newArrayList();
    for (final String topicConf : conf.get(TOPICS_CONF).split(";")) {
        final String[] topicConfTokens = topicConf.split(",");
        final String topic = topicConfTokens[0];
        final String group = topicConfTokens[1];
        final Class<? extends Mapper> mapper;
        try {
            mapper = (Class<? extends Mapper>) conf.getClassByName(topicConfTokens[2]);
        } catch (final ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
        result.add(new TopicConf(topic, group, mapper));
    }
    return result;
}
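A hypothetical sketch of the addTopic side implied by this parser: entries of the form topic,group,mapperClass joined by semicolons. The real implementation is not shown above, so treat this as an inference from the decoder, not the library's code:

// Hypothetical writer inferred from getTopics above; note this scheme cannot
// handle topic or group names containing ',' or ';'.
public static void addTopic(Job job, String topic, String group, Class<? extends Mapper> mapper) {
    Configuration conf = job.getConfiguration();
    String entry = topic + "," + group + "," + mapper.getName();
    String existing = conf.get(TOPICS_CONF);
    conf.set(TOPICS_CONF, existing == null ? entry : existing + ";" + entry);
}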
From source file: com.bonc.mr_roamRecognition_hjpt.comm.NewFileOutputFormat.java
License: Apache License

/**
 * Get the {@link CompressionCodec} for compressing the job outputs.
 *
 * @param job
 *            the {@link Job} to look in
 * @param defaultValue
 *            the {@link CompressionCodec} to return if not set
 * @return the {@link CompressionCodec} to be used to compress the job
 *         outputs
 * @throws IllegalArgumentException
 *             if the class was specified, but not found
 */
public static Class<? extends CompressionCodec> getOutputCompressorClass(JobContext job,
        Class<? extends CompressionCodec> defaultValue) {
    Class<? extends CompressionCodec> codecClass = defaultValue;
    Configuration conf = job.getConfiguration();
    String name = conf.get(FileOutputFormat.COMPRESS_CODEC);
    if (name != null) {
        try {
            codecClass = conf.getClassByName(name).asSubclass(CompressionCodec.class);
        } catch (ClassNotFoundException e) {
            throw new IllegalArgumentException("Compression codec " + name + " was not found.", e);
        }
    }
    return codecClass;
}
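On the producing side this pairs with the stock Hadoop helpers, which populate the same FileOutputFormat.COMPRESS_CODEC property that this method reads back:

// Standard org.apache.hadoop.mapreduce.lib.output.FileOutputFormat API.
Job job = Job.getInstance(new Configuration(), "compressed-output");
FileOutputFormat.setCompressOutput(job, true);
FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class); // org.apache.hadoop.io.compress.GzipCodec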
From source file: com.cloudera.crunch.impl.mr.run.CrunchInputs.java
License: Apache License

public static Map<Class<? extends InputFormat>, Map<Integer, List<Path>>> getFormatNodeMap(JobContext job) {
    Map<Class<? extends InputFormat>, Map<Integer, List<Path>>> formatNodeMap = Maps.newHashMap();
    Configuration conf = job.getConfiguration();
    for (String input : Splitter.on(RECORD_SEP).split(conf.get(RuntimeParameters.MULTI_INPUTS))) {
        List<String> fields = ImmutableList.copyOf(SPLITTER.split(input));
        Class<? extends InputFormat> inputFormatClass;
        try {
            inputFormatClass = (Class<? extends InputFormat>) conf.getClassByName(fields.get(0));
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
        if (!formatNodeMap.containsKey(inputFormatClass)) {
            formatNodeMap.put(inputFormatClass, Maps.<Integer, List<Path>>newHashMap());
        }
        Integer nodeIndex = Integer.valueOf(fields.get(1));
        if (!formatNodeMap.get(inputFormatClass).containsKey(nodeIndex)) {
            formatNodeMap.get(inputFormatClass).put(nodeIndex, Lists.<Path>newLinkedList());
        }
        formatNodeMap.get(inputFormatClass).get(nodeIndex).add(new Path(fields.get(2)));
    }
    return formatNodeMap;
}
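For completeness, a hypothetical sketch of the producing side implied by this parser: each record carries the InputFormat class name, a node index, and a path. The ',' record separator and ';' field separator below are placeholders for RECORD_SEP and SPLITTER, whose literal values are not shown above:

// Hypothetical encoder for RuntimeParameters.MULTI_INPUTS, inferred from the parser above;
// separators are placeholders for the constants defined in CrunchInputs.
String record = inputFormatClass.getName() + ";" + nodeIndex + ";" + path.toString();
String existing = conf.get(RuntimeParameters.MULTI_INPUTS);
conf.set(RuntimeParameters.MULTI_INPUTS, existing == null ? record : existing + "," + record);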