Usage examples for org.apache.hadoop.conf.Configuration#getClassByName
public Class<?> getClassByName(String name) throws ClassNotFoundException
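getClassByName resolves a class through the Configuration's classloader (including any job-specific classpath) and caches the result per classloader, which is why Hadoop code prefers it over a plain Class.forName. A minimal sketch of the common pattern, assuming a hypothetical helper name and a codec as the target type:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.util.ReflectionUtils;

static CompressionCodec loadCodec(Configuration conf, String className) {
    try {
        // Resolved through the Configuration's classloader; results are cached internally.
        Class<?> clazz = conf.getClassByName(className);
        // ReflectionUtils.newInstance also calls setConf(...) on Configurable instances.
        return (CompressionCodec) ReflectionUtils.newInstance(clazz, conf);
    } catch (ClassNotFoundException e) {
        throw new RuntimeException("Codec class " + className + " not found", e);
    }
}

Most of the examples below follow this same resolve-then-instantiate shape, differing mainly in how they report a missing class.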
From source file:com.hortonworks.hbase.replication.bridge.HBaseServer.java
License:Apache License
static Class<? extends VersionedProtocol> getProtocolClass(String protocolName, Configuration conf)
        throws ClassNotFoundException {
    Class<? extends VersionedProtocol> protocol = PROTOCOL_CACHE.get(protocolName);
    if (protocol == null) {
        protocol = (Class<? extends VersionedProtocol>) conf.getClassByName(protocolName);
        PROTOCOL_CACHE.put(protocolName, protocol);
    }
    return protocol;
}
From source file:com.inmobi.databus.readers.DatabusStreamReader.java
License:Apache License
protected DatabusStreamReader(PartitionId partitionId, FileSystem fs, Path streamDir,
        String inputFormatClass, Configuration conf, long waitTimeForFileCreate,
        PartitionReaderStatsExposer metrics, boolean noNewFiles, Date stopTime) throws IOException {
    super(partitionId, fs, streamDir, waitTimeForFileCreate, metrics, noNewFiles, stopTime);
    this.conf = conf;
    try {
        input = (InputFormat<Object, Object>) ReflectionUtils.newInstance(
                conf.getClassByName(inputFormatClass), conf);
    } catch (ClassNotFoundException e) {
        throw new IllegalArgumentException("Input format class " + inputFormatClass + " not found", e);
    }
}
From source file:com.m6d.hive.protobuf.ProtobufDeserializer.java
License:Apache License
@Override
public void initialize(Configuration job, Properties tbl) throws SerDeException {
    try {
        String keyClassName = tbl.getProperty(KEY_SERIALIZE_CLASS);
        if (keyClassName != null) {
            keyClass = job.getClassByName(keyClassName);
            parseFrom = keyClass.getMethod(PARSE_FROM, byteArrayParameters);
        }
        String valueClassName = tbl.getProperty(VALUE_SERIALIZE_CLASS);
        if (valueClassName != null) {
            valueClass = job.getClassByName(valueClassName);
            vparseFrom = valueClass.getMethod(PARSE_FROM, byteArrayParameters);
        }
        this.oi = buildObjectInspector();
    } catch (Exception ex) {
        throw new SerDeException(ex.getMessage(), ex);
    }
}
From source file:com.mongodb.hadoop.input.MongoMultipleInputs.java
License:Apache License
/**
 * Retrieves a map of {@link Path}s to the {@link InputFormat} class
 * that should be used for them.
 *
 * @param job The {@link JobContext}
 * @see #addInputPath(JobConf, Path, Class)
 * @return A map of paths to input formats for the job
 */
@SuppressWarnings("unchecked")
static Map<Path, InputFormat> getInputFormatMap(JobContext job) {
    Map<Path, InputFormat> m = new HashMap<Path, InputFormat>();
    Configuration conf = job.getConfiguration();
    List<MongoRequest> mongoRequests = MongoConfigUtil.getMongoRequests(conf);
    // Guard against a missing configuration, mirroring getMapperTypeMap below.
    if (mongoRequests == null) {
        return Collections.emptyMap();
    }
    for (MongoRequest mongoRequest : mongoRequests) {
        InputFormat inputFormat;
        try {
            inputFormat = (InputFormat) ReflectionUtils.newInstance(
                    conf.getClassByName(mongoRequest.getInputFormat()), conf);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
        m.put(new Path(mongoRequest.getInputURI().toString()), inputFormat);
    }
    return m;
}
From source file:com.mongodb.hadoop.input.MongoMultipleInputs.java
License:Apache License
/**
 * Retrieves a map of {@link Path}s to the {@link Mapper} class that
 * should be used for them.
 *
 * @param job The {@link JobContext}
 * @see #addInputPath(JobConf, Path, Class, Class)
 * @return A map of paths to mappers for the job
 */
@SuppressWarnings("unchecked")
static Map<Path, Class<? extends Mapper>> getMapperTypeMap(JobContext job) {
    Configuration conf = job.getConfiguration();
    List<MongoRequest> mongoRequests = MongoConfigUtil.getMongoRequests(conf);
    if (mongoRequests == null) {
        return Collections.emptyMap();
    }
    Map<Path, Class<? extends Mapper>> m = new HashMap<Path, Class<? extends Mapper>>();
    for (MongoRequest mongoRequest : mongoRequests) {
        Class<? extends Mapper> mapClass;
        try {
            mapClass = (Class<? extends Mapper>) conf.getClassByName(mongoRequest.getMapper());
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
        m.put(new Path(mongoRequest.getInputURI().toString()), mapClass);
    }
    return m;
}
From source file:com.mongodb.hadoop.util.MapredMongoConfigUtil.java
License:Apache License
/**
 * Fetch a class by its actual class name, rather than by a key name in the configuration
 * properties. We still need to pass in a Configuration object here, since the Configuration
 * class maintains an internal cache of class names for performance on some Hadoop versions.
 * It also ensures that the same classloader is used across all keys.
 */
public static <U> Class<? extends U> getClassByName(final Configuration conf, final String className,
        final Class<U> xface) {
    if (className == null) {
        return null;
    }
    try {
        Class<?> theClass = conf.getClassByName(className);
        if (theClass != null && !xface.isAssignableFrom(theClass)) {
            throw new RuntimeException(theClass + " not " + xface.getName());
        } else if (theClass != null) {
            return theClass.asSubclass(xface);
        } else {
            return null;
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
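A hedged usage sketch of this helper; the configuration key and the choice of Writable as the expected interface are hypothetical illustrations:

// Hypothetical usage: resolve a class name from the configuration and
// enforce that it implements Writable before instantiating it.
Configuration conf = new Configuration();
String className = conf.get("example.writable.class"); // hypothetical key
Class<? extends Writable> cls =
        MapredMongoConfigUtil.getClassByName(conf, className, Writable.class);
if (cls != null) {
    Writable w = ReflectionUtils.newInstance(cls, conf);
}

The xface parameter turns a late ClassCastException into an immediate, descriptive RuntimeException at load time.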
From source file:com.nebulousnews.io.ObjectSerializableWritable.java
License:Apache License
/**
 * Read a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding.
 */
@SuppressWarnings("unchecked")
public static Object readObject(DataInput in, ObjectSerializableWritable objectWritable, Configuration conf)
        throws IOException {
    String className = UTF8.readString(in);
    Class<?> declaredClass = PRIMITIVE_NAMES.get(className);
    if (declaredClass == null) {
        try {
            declaredClass = conf.getClassByName(className);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException("readObject can't find class " + className, e);
        }
    }
    Object instance;
    if (declaredClass.isPrimitive()) { // primitive types
        if (declaredClass == Boolean.TYPE) { // boolean
            instance = Boolean.valueOf(in.readBoolean());
        } else if (declaredClass == Character.TYPE) { // char
            instance = Character.valueOf(in.readChar());
        } else if (declaredClass == Byte.TYPE) { // byte
            instance = Byte.valueOf(in.readByte());
        } else if (declaredClass == Short.TYPE) { // short
            instance = Short.valueOf(in.readShort());
        } else if (declaredClass == Integer.TYPE) { // int
            instance = Integer.valueOf(in.readInt());
        } else if (declaredClass == Long.TYPE) { // long
            instance = Long.valueOf(in.readLong());
        } else if (declaredClass == Float.TYPE) { // float
            instance = Float.valueOf(in.readFloat());
        } else if (declaredClass == Double.TYPE) { // double
            instance = Double.valueOf(in.readDouble());
        } else if (declaredClass == Void.TYPE) { // void
            instance = null;
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declaredClass);
        }
    } else if (declaredClass.isArray()) { // array
        int length = in.readInt();
        instance = Array.newInstance(declaredClass.getComponentType(), length);
        for (int i = 0; i < length; i++) {
            Array.set(instance, i, readObject(in, conf));
        }
    } else if (declaredClass == String.class) { // String
        instance = UTF8.readString(in);
    } else if (declaredClass.isEnum()) { // enum
        instance = Enum.valueOf((Class<? extends Enum>) declaredClass, UTF8.readString(in));
    } else if (Serializable.class.isAssignableFrom(declaredClass)) { // Serializable
        ObjectInputStream input = new ObjectInputStream((InputStream) in);
        try {
            instance = input.readObject();
        } catch (ClassNotFoundException e) {
            throw new IllegalArgumentException("ClassNotFound: " + declaredClass, e);
        }
    } else { // Writable (or fail, this is dangerous)
        Class instanceClass = null;
        String str = "";
        try {
            str = UTF8.readString(in);
            instanceClass = conf.getClassByName(str);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException("readObject can't find class " + str, e);
        }
        Writable writable = WritableFactories.newInstance(instanceClass, conf);
        writable.readFields(in);
        instance = writable;
        if (instanceClass == NullInstance.class) { // null
            declaredClass = ((NullInstance) instance).declaredClass;
            instance = null;
        }
    }
    if (objectWritable != null) { // store values
        objectWritable.declaredClass = declaredClass;
        objectWritable.instance = instance;
    }
    return instance;
}
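A hedged usage sketch of the reader above; the file name is hypothetical, and it assumes the stream was produced by a matching writeObject in the same class:

// Hypothetical usage: read one serialized object back from a stream.
DataInputStream in = new DataInputStream(new FileInputStream("objects.bin")); // hypothetical file
Object value = ObjectSerializableWritable.readObject(in, null, new Configuration());

Passing null for the ObjectSerializableWritable argument skips the store-back step at the end of the method.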
From source file:com.scaleoutsoftware.soss.hserver.hadoop.SubmittedJob.java
License:Apache License
@SuppressWarnings("unchecked") private static <T> T getSplitDetails(FSDataInputStream inFile, long offset, Configuration configuration) throws IOException { inFile.seek(offset);// www. j av a 2 s. c o m String className = StringInterner.weakIntern(Text.readString(inFile)); Class<T> cls; try { cls = (Class<T>) configuration.getClassByName(className); } catch (ClassNotFoundException ce) { IOException wrap = new IOException("Split class " + className + " not found"); wrap.initCause(ce); throw wrap; } SerializationFactory factory = new SerializationFactory(configuration); Deserializer<T> deserializer = (Deserializer<T>) factory.getDeserializer(cls); deserializer.open(inFile); T split = deserializer.deserialize(null); return split; }
From source file:com.yahoo.omid.committable.hbase.RegionSplitter.java
License:Apache License
/**
 * @throws IOException
 *             if the specified SplitAlgorithm class couldn't be instantiated
 */
public static SplitAlgorithm newSplitAlgoInstance(Configuration conf, String splitClassName)
        throws IOException {
    Class<?> splitClass;
    // For split algorithms built in to RegionSplitter, the user can specify
    // their simple class name instead of a fully qualified class name.
    if (splitClassName.equals(UniformSplit.class.getSimpleName())) {
        splitClass = UniformSplit.class;
    } else {
        try {
            splitClass = conf.getClassByName(splitClassName);
        } catch (ClassNotFoundException e) {
            throw new IOException("Couldn't load split class " + splitClassName, e);
        }
        if (splitClass == null) {
            throw new IOException("Failed loading split class " + splitClassName);
        }
        if (!SplitAlgorithm.class.isAssignableFrom(splitClass)) {
            throw new IOException("Specified split class doesn't implement SplitAlgorithm");
        }
    }
    try {
        return splitClass.asSubclass(SplitAlgorithm.class).newInstance();
    } catch (Exception e) {
        throw new IOException("Problem loading split algorithm: ", e);
    }
}
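A hedged usage sketch, assuming this copy keeps the interface of HBase's RegionSplitter.SplitAlgorithm, where split(int) returns the computed split points; the region count is hypothetical:

// Hypothetical usage: resolve a split algorithm by its simple name and
// compute boundaries for 16 regions.
Configuration conf = HBaseConfiguration.create();
SplitAlgorithm algo = RegionSplitter.newSplitAlgoInstance(conf, "UniformSplit");
byte[][] splitPoints = algo.split(16);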
From source file:de.tudarmstadt.ukp.dkpro.c4corpus.hadoop.io.WARCFileWriter.java
License:Apache License
/**
 * Instantiates a Hadoop codec for compressing and decompressing Gzip files. This is the
 * most common compression applied to WARC files.
 *
 * @param conf The Hadoop configuration.
 * @return codec instance
 */
public static CompressionCodec getGzipCodec(Configuration conf) {
    try {
        return ReflectionUtils.newInstance(
                conf.getClassByName("org.apache.hadoop.io.compress.GzipCodec")
                        .asSubclass(CompressionCodec.class),
                conf);
    } catch (ClassNotFoundException e) {
        logger.warn("GzipCodec could not be instantiated", e);
        return null;
    }
}
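A hedged usage sketch; the output path is hypothetical:

// Hypothetical usage: wrap a raw stream with the codec to emit gzip output.
Configuration conf = new Configuration();
CompressionCodec codec = WARCFileWriter.getGzipCodec(conf);
if (codec != null) {
    OutputStream out = codec.createOutputStream(new FileOutputStream("records.warc.gz"));
    out.write("WARC/1.0\r\n".getBytes(StandardCharsets.UTF_8));
    out.close();
}

Returning null on ClassNotFoundException lets the caller fall back to uncompressed output instead of failing outright.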