Example usage for org.apache.hadoop.conf Configuration getClass

List of usage examples for org.apache.hadoop.conf Configuration getClass

Introduction

On this page you can find example usage of org.apache.hadoop.conf.Configuration.getClass(String, Class&lt;?&gt;).

Prototype

public Class<?> getClass(String name, Class<?> defaultValue) 

Document

Get the value of the name property as a Class. If no such property is set, the supplied defaultValue is returned.
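
Before the project examples below, here is a minimal, self-contained sketch of the call itself. The property name example.handler.class is a made-up placeholder, and the snippet assumes only that hadoop-common is on the classpath; it is an illustration of the prototype above, not code from any of the quoted projects.

import org.apache.hadoop.conf.Configuration;

public class GetClassExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // The property is not set, so getClass falls back to the supplied default.
        Class<?> unset = conf.getClass("example.handler.class", java.util.ArrayList.class);
        System.out.println(unset); // class java.util.ArrayList

        // Set the property to a fully qualified class name and read it back;
        // Configuration loads the named class and returns it.
        conf.set("example.handler.class", "java.util.LinkedList");
        Class<?> configured = conf.getClass("example.handler.class", java.util.ArrayList.class);
        System.out.println(configured); // class java.util.LinkedList
    }
}

Callers typically narrow the returned Class with asSubclass() or a cast and then instantiate it reflectively, which is the pattern the project examples below follow.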

Usage

From source file: edu.uci.ics.pregelix.api.util.BspUtils.java

License: Apache License

/**
 * Get the user's subclassed vertex index class.
 * @param conf
 *            Configuration to check
 * @return User's vertex index class
 */
@SuppressWarnings("unchecked")
public static <I extends Writable> Class<I> getVertexIndexClass(Configuration conf) {
    if (conf == null)
        conf = defaultConf;
    return (Class<I>) conf.getClass(PregelixJob.VERTEX_INDEX_CLASS, WritableComparable.class);
}

From source file: edu.uci.ics.pregelix.api.util.BspUtils.java

License: Apache License

/**
 * Get the user's subclassed vertex value class.
 * @param conf
 *            Configuration to check
 * @return User's vertex value class
 */
@SuppressWarnings("unchecked")
public static <V extends Writable> Class<V> getVertexValueClass(Configuration conf) {
    return (Class<V>) conf.getClass(PregelixJob.VERTEX_VALUE_CLASS, Writable.class);
}

From source file: edu.uci.ics.pregelix.api.util.BspUtils.java

License: Apache License

/**
 * Get the user's subclassed edge value class.
 * @param conf
 *            Configuration to check
 * @return User's vertex edge value class
 */
@SuppressWarnings("unchecked")
public static <E extends Writable> Class<E> getEdgeValueClass(Configuration conf) {
    return (Class<E>) conf.getClass(PregelixJob.EDGE_VALUE_CLASS, Writable.class);
}

From source file: edu.uci.ics.pregelix.api.util.BspUtils.java

License: Apache License

/**
 * Get the user's subclassed vertex message value class.
 * @param conf
 *            Configuration to check
 * @return User's vertex message value class
 */
@SuppressWarnings("unchecked")
public static <M extends Writable> Class<M> getMessageValueClass(Configuration conf) {
    if (conf == null)
        conf = defaultConf;
    return (Class<M>) conf.getClass(PregelixJob.MESSAGE_VALUE_CLASS, Writable.class);
}

From source file: edu.uci.ics.pregelix.api.util.BspUtils.java

License: Apache License

/**
 * Get the user's subclassed global aggregator's partial aggregate value class.
 * @param conf
 *            Configuration to check
 * @return User's partial aggregate value class
 */
@SuppressWarnings("unchecked")
public static <M extends Writable> Class<M> getPartialAggregateValueClass(Configuration conf) {
    if (conf == null)
        conf = defaultConf;
    return (Class<M>) conf.getClass(PregelixJob.PARTIAL_AGGREGATE_VALUE_CLASS, Writable.class);
}

From source file: edu.uci.ics.pregelix.api.util.BspUtils.java

License: Apache License

/**
 * Get the user's subclassed combiner's partial combine value class.
 * @param conf
 *            Configuration to check
 * @return User's partial combine value class
 */
@SuppressWarnings("unchecked")
public static <M extends Writable> Class<M> getPartialCombineValueClass(Configuration conf) {
    if (conf == null)
        conf = defaultConf;
    return (Class<M>) conf.getClass(PregelixJob.PARTIAL_COMBINE_VALUE_CLASS, Writable.class);
}

From source file: edu.uci.ics.pregelix.api.util.BspUtils.java

License: Apache License

/**
 * Get the user's subclassed global aggregator's global value class.
 * @param conf
 *            Configuration to check
 * @return User's global aggregate value class
 */
@SuppressWarnings("unchecked")
public static <M extends Writable> Class<M> getFinalAggregateValueClass(Configuration conf) {
    if (conf == null)
        conf = defaultConf;
    return (Class<M>) conf.getClass(PregelixJob.FINAL_AGGREGATE_VALUE_CLASS, Writable.class);
}

From source file: edu.uci.ics.pregelix.api.util.HadoopCountersGlobalAggregateHook.java

License: Apache License

@Override
public void completeIteration(int superstep, PregelixJob job) throws HyracksDataException {
    Configuration conf = job.getConfiguration();
    String jobId = BspUtils.getJobId(conf);
    Class<?> aggClass = conf.getClass(PregelixJob.COUNTERS_AGGREGATOR_CLASS, null);
    if (aggClass == null)
        throw new HyracksDataException(
                "A subclass of HadoopCountersAggregator must active for GlobalAggregateCountersHook to operate!");
    Counters curIterCounters;
    try {
        curIterCounters = (Counters) BspUtils.readGlobalAggregateValue(conf, jobId, aggClass.getName());
    } catch (IllegalStateException e) {
        throw new HyracksDataException(
                "A subclass of HadoopCountersAggregator must active for GlobalAggregateCountersHook to operate!",
                e);
    }
    if (superstep > 1) {
        Counters prevCounters = BspUtils.readCounters(superstep - 1, conf, jobId); // the counters from the previous iterations, all aggregated together
        curIterCounters.incrAllCounters(prevCounters); // add my counters to previous ones
    }
    BspUtils.writeCounters(curIterCounters, superstep, conf, jobId);
    BspUtils.writeCountersLastIteration(superstep, conf, jobId);
}

From source file: edu.umn.cs.spatialHadoop.core.Partitioner.java

License: Open Source License

/**
 * Retrieves the partitioner configured for a given job.
 * @param conf the job configuration to check
 * @return the configured partitioner, or null if it could not be loaded
 */
public static Partitioner getPartitioner(Configuration conf) {
    // Pass null as the default so a missing PartitionerClass property can be detected
    Class<?> configuredClass = conf.getClass(PartitionerClass, null);
    if (configuredClass == null)
        return null;
    Class<? extends Partitioner> klass = configuredClass.asSubclass(Partitioner.class);
    try {
        Partitioner partitioner = klass.newInstance();

        String partitionerFile = conf.get(PartitionerValue);
        if (partitionerFile != null) {
            Path[] cacheFiles = DistributedCache.getLocalCacheFiles(conf);
            for (Path cacheFile : cacheFiles) {
                if (cacheFile.getName().contains(partitionerFile)) {
                    FSDataInputStream in = FileSystem.getLocal(conf).open(cacheFile);
                    partitioner.readFields(in);
                    in.close();
                }
            }
        }
        return partitioner;
    } catch (InstantiationException e) {
        Log.warn("Error instantiating partitioner", e);
        return null;
    } catch (IllegalAccessException e) {
        Log.warn("Error instantiating partitioner", e);
        return null;
    } catch (IOException e) {
        Log.warn("Error retrieving partitioner value", e);
        return null;
    }
}

From source file: edu.umn.cs.spatialHadoop.mapreduce.SpatialInputFormat3.java

License: Open Source License

@Override
public RecordReader<K, Iterable<V>> createRecordReader(InputSplit split, TaskAttemptContext context)
        throws IOException, InterruptedException {
    Path path;
    String extension;
    if (split instanceof FileSplit) {
        FileSplit fsplit = (FileSplit) split;
        extension = FileUtil.getExtensionWithoutCompression(path = fsplit.getPath());
    } else if (split instanceof CombineFileSplit) {
        CombineFileSplit csplit = (CombineFileSplit) split;
        extension = FileUtil.getExtensionWithoutCompression(path = csplit.getPath(0));
    } else {
        throw new RuntimeException("Cannot process plits of type " + split.getClass());
    }
    // getExtensionWithoutCompression has already stripped any compression
    // extension, so match on the underlying file format extension directly
    if (extension.equals("hdf")) {
        // HDF File. Create HDFRecordReader
        return (RecordReader) new HDFRecordReader();
    }
    if (extension.equals("rtree")) {
        // File is locally indexed as RTree
        return (RecordReader) new RTreeRecordReader3<V>();
    }
    // For backward compatibility, check if the file is RTree indexed from
    // its signature
    Configuration conf = context != null ? context.getConfiguration() : new Configuration();
    if (SpatialSite.isRTree(path.getFileSystem(conf), path)) {
        return (RecordReader) new RTreeRecordReader3<V>();
    }
    // Check if a custom record reader is configured with this extension
    Class<?> recordReaderClass = conf.getClass("SpatialInputFormat." + extension + ".recordreader",
            SpatialRecordReader3.class);
    try {
        return (RecordReader<K, Iterable<V>>) recordReaderClass.newInstance();
    } catch (InstantiationException e) {
        // Fall through to the default record reader below
    } catch (IllegalAccessException e) {
        // Fall through to the default record reader below
    }
    // Use the default SpatialRecordReader if none of the above worked
    return (RecordReader) new SpatialRecordReader3<V>();
}