Example usage for org.apache.hadoop.conf.Configuration.setClass

Introduction

This page collects usage examples for org.apache.hadoop.conf.Configuration.setClass.

Prototype

public void setClass(String name, Class<?> theClass, Class<?> xface) 

Document

Sets the value of the name property to the name of theClass, which must implement the given interface xface; if theClass does not implement xface, setClass throws a RuntimeException.
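
A minimal round-trip sketch (the property key "example.writable.class" and the classes used here are illustrative, not taken from the examples below): setClass stores the class name as a string under the given key, and the matching getClass overload resolves it back, checked against the declared interface.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;

public class SetClassExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Store IntWritable under an illustrative key, declaring Writable as
        // the interface it must implement; a class that did not implement
        // Writable would make setClass throw a RuntimeException here.
        conf.setClass("example.writable.class", IntWritable.class, Writable.class);

        // The property holds the class name as a plain string ...
        System.out.println(conf.get("example.writable.class"));

        // ... and the typed getClass overload loads it back, again checked
        // against the declared interface.
        Class<? extends Writable> cls = conf.getClass("example.writable.class", null, Writable.class);
        System.out.println(cls.getSimpleName()); // IntWritable
    }
}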

Usage

From source file: org.apache.giraph.TestVertexTypes.java

License: Apache License

@Test
public void testDerivedMatchingType() throws SecurityException, NoSuchMethodException, NoSuchFieldException {
    @SuppressWarnings("rawtypes")
    GraphMapper<?, ?, ?, ?> mapper = new GraphMapper();
    Configuration conf = new Configuration();
    conf.setClass(GiraphJob.VERTEX_CLASS, DerivedVertexMatch.class, BasicVertex.class);
    conf.setClass(GiraphJob.VERTEX_INPUT_FORMAT_CLASS, SimpleSuperstepVertexInputFormat.class,
            VertexInputFormat.class);
    mapper.determineClassTypes(conf);
}

From source file: org.apache.giraph.TestVertexTypes.java

License: Apache License

@Test
public void testDerivedInputFormatType() throws SecurityException, NoSuchMethodException, NoSuchFieldException {
    @SuppressWarnings("rawtypes")
    GraphMapper<?, ?, ?, ?> mapper = new GraphMapper();
    Configuration conf = new Configuration();
    conf.setClass(GiraphJob.VERTEX_CLASS, DerivedVertexMatch.class, BasicVertex.class);
    conf.setClass(GiraphJob.VERTEX_INPUT_FORMAT_CLASS, SimpleSuperstepVertexInputFormat.class,
            VertexInputFormat.class);
    mapper.determineClassTypes(conf);
}

From source file: org.apache.giraph.TestVertexTypes.java

License: Apache License

@Test
public void testMismatchingVertex() throws SecurityException, NoSuchMethodException, NoSuchFieldException {
    @SuppressWarnings("rawtypes")
    GraphMapper<?, ?, ?, ?> mapper = new GraphMapper();
    Configuration conf = new Configuration();
    conf.setClass(GiraphJob.VERTEX_CLASS, GeneratedVertexMismatch.class, BasicVertex.class);
    conf.setClass(GiraphJob.VERTEX_INPUT_FORMAT_CLASS, SimpleSuperstepVertexInputFormat.class,
            VertexInputFormat.class);
    try {
        mapper.determineClassTypes(conf);
        throw new RuntimeException("testMismatchingVertex: Should have caught an exception!");
    } catch (IllegalArgumentException e) {
        // expected: the mismatched vertex types are rejected
    }
}

From source file: org.apache.giraph.TestVertexTypes.java

License: Apache License

@Test
public void testMismatchingCombiner() throws SecurityException, NoSuchMethodException, NoSuchFieldException {
    @SuppressWarnings("rawtypes")
    GraphMapper<?, ?, ?, ?> mapper = new GraphMapper();
    Configuration conf = new Configuration();
    conf.setClass(GiraphJob.VERTEX_CLASS, GeneratedVertexMatch.class, BasicVertex.class);
    conf.setClass(GiraphJob.VERTEX_INPUT_FORMAT_CLASS, SimpleSuperstepVertexInputFormat.class,
            VertexInputFormat.class);
    conf.setClass(GiraphJob.VERTEX_COMBINER_CLASS, GeneratedVertexMismatchCombiner.class, VertexCombiner.class);
    try {
        mapper.determineClassTypes(conf);
        throw new RuntimeException("testMismatchingCombiner: Should have caught an exception!");
    } catch (IllegalArgumentException e) {
        // expected: the mismatched combiner type is rejected
    }
}

From source file: org.apache.giraph.TestVertexTypes.java

License: Apache License

@Test
public void testJsonBase64FormatType() throws SecurityException, NoSuchMethodException, NoSuchFieldException {
    @SuppressWarnings("rawtypes")
    GraphMapper<?, ?, ?, ?> mapper = new GraphMapper();
    Configuration conf = new Configuration();
    conf.setClass(GiraphJob.VERTEX_CLASS, GeneratedVertexMatch.class, BasicVertex.class);
    conf.setClass(GiraphJob.VERTEX_INPUT_FORMAT_CLASS, JsonBase64VertexInputFormat.class,
            VertexInputFormat.class);
    conf.setClass(GiraphJob.VERTEX_OUTPUT_FORMAT_CLASS, JsonBase64VertexOutputFormat.class,
            VertexOutputFormat.class);
    mapper.determineClassTypes(conf);
}

From source file: org.apache.gora.mapreduce.GoraOutputFormat.java

License: Apache License

/**
 * Sets the output parameters for the job 
 * @param job the job to set the properties for
 * @param dataStoreClass the datastore class
 * @param keyClass output key class
 * @param persistentClass output value class
 * @param reuseObjects whether to reuse objects in serialization
 */
@SuppressWarnings("rawtypes")
public static <K, V extends Persistent> void setOutput(Job job, Class<? extends DataStore> dataStoreClass,
        Class<K> keyClass, Class<V> persistentClass, boolean reuseObjects) {

    Configuration conf = job.getConfiguration();

    GoraMapReduceUtils.setIOSerializations(conf, reuseObjects);

    job.setOutputFormatClass(GoraOutputFormat.class);
    job.setOutputKeyClass(keyClass);
    job.setOutputValueClass(persistentClass);
    conf.setClass(GoraOutputFormat.DATA_STORE_CLASS, dataStoreClass, DataStore.class);
    conf.setClass(GoraOutputFormat.OUTPUT_KEY_CLASS, keyClass, Object.class);
    conf.setClass(GoraOutputFormat.OUTPUT_VALUE_CLASS, persistentClass, Persistent.class);
}
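
For context, a hedged job-setup sketch. The HBaseStore backend and the Pageview persistent bean named below are assumptions for illustration, not taken from the snippet above; substitute your own datastore and generated bean.

import org.apache.gora.examples.generated.Pageview; // assumed persistent bean
import org.apache.gora.hbase.store.HBaseStore;      // assumed datastore backend
import org.apache.gora.mapreduce.GoraOutputFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class GoraOutputSetup {
    public static void main(String[] args) throws Exception {
        // Configure a job to write String-keyed Pageview records through
        // GoraOutputFormat; setOutput records the datastore, key, and value
        // classes in the job configuration via Configuration.setClass.
        Job job = Job.getInstance(new Configuration(), "gora-output-example");
        GoraOutputFormat.setOutput(job, HBaseStore.class, String.class, Pageview.class, true);
    }
}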

From source file: org.apache.hama.bsp.message.compress.TestBSPMessageCompressor.java

License: Apache License

public void testCompression() throws IOException {
    Configuration configuration = new Configuration();
    BSPMessageCompressor<IntWritable> compressor = new BSPMessageCompressorFactory<IntWritable>()
            .getCompressor(configuration);

    assertNull(compressor); // no codec configured yet
    configuration.setClass(BSPMessageCompressorFactory.COMPRESSION_CODEC_CLASS, Bzip2Compressor.class,
            BSPMessageCompressor.class);
    compressor = new BSPMessageCompressorFactory<IntWritable>().getCompressor(configuration);

    assertNotNull(compressor);

    BSPMessageBundle<IntWritable> a = new BSPMessageBundle<IntWritable>();
    for (int i = 0; i < 10000; i++) {
        a.addMessage(new IntWritable(i));
    }

    ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream();
    DataOutputStream bufferDos = new DataOutputStream(byteBuffer);
    a.write(bufferDos);

    byte[] compressed = compressor.compress(byteBuffer.toByteArray());
    assertTrue(byteBuffer.size() > compressed.length);
    byte[] decompressed = compressor.decompress(compressed);

    ByteArrayInputStream bis = new ByteArrayInputStream(decompressed);
    DataInputStream in = new DataInputStream(bis);

    BSPMessageBundle<IntWritable> b = new BSPMessageBundle<IntWritable>();
    b.readFields(in);
    Iterator<IntWritable> it = b.iterator();
    int counter = 0;
    while (it.hasNext()) {
        assertTrue(it.next().get() == counter);
        counter++;
    }
}

From source file: org.apache.hama.bsp.message.TestMessageIO.java

License: Apache License

public void testCombineProcessor() throws Exception {
    String fileName = System.getProperty("java.io.tmpdir") + File.separatorChar
            + new BigInteger(128, new SecureRandom()).toString(32);

    ByteBuffer buffer = ByteBuffer.allocateDirect(512);
    DirectByteBufferOutputStream stream = new DirectByteBufferOutputStream();
    stream.setBuffer(buffer);
    IntWritable intWritable = new IntWritable(1);
    int sum = 0;
    for (int i = 0; i < 100; ++i) {
        intWritable.set(i);
        intWritable.write(stream);
        sum += i;
    }
    intWritable.write(stream);
    stream.close();
    buffer.flip();

    Configuration conf = new HamaConfiguration();

    conf.setClass(Constants.MESSAGE_CLASS, IntWritable.class, Writable.class);
    conf.setClass(Constants.COMBINER_CLASS, SumCombiner.class, Combiner.class);

    CombineSpilledDataProcessor<IntWritable> processor = new CombineSpilledDataProcessor<IntWritable>(fileName);
    assertTrue(processor.init(conf));
    File f = new File(fileName);
    try {
        assertTrue(processor.handleSpilledBuffer(new SpilledByteBuffer(buffer, 400)));
        buffer.flip();
        assertTrue(processor.handleSpilledBuffer(new SpilledByteBuffer(buffer, 400)));
        assertTrue(processor.close());

        assertTrue(f.exists());
        assertEquals(f.length(), 8);

        RandomAccessFile raf = new RandomAccessFile(fileName, "r");
        FileChannel fileChannel = raf.getChannel();
        ByteBuffer readBuff = ByteBuffer.allocateDirect(16);
        fileChannel.read(readBuff);
        readBuff.flip();
        assertEquals(readBuff.getInt(), sum);
        assertEquals(readBuff.getInt(), sum + 99);
        raf.close();
    } finally {
        assertTrue(f.delete());
    }

}

From source file: org.apache.hama.bsp.message.TestSpillingQueue.java

License: Apache License

/**
 * Test the spilling queue where the message class is specified.
 *
 * @throws Exception
 */
public void testTextSpillingQueue() throws Exception {

    String msg = "Testing the spillage of spilling buffer";
    Text text = new Text(msg);
    TaskAttemptID id = new TaskAttemptID(new TaskID("123", 1, 2), 0);
    SpillingQueue<Text> queue = new SpillingQueue<Text>();
    Configuration conf = new HamaConfiguration();

    String fileName = System.getProperty("java.io.tmpdir") + File.separatorChar
            + new BigInteger(128, new SecureRandom()).toString(32);
    File file = new File(fileName);
    conf.set(SpillingQueue.SPILLBUFFER_FILENAME, fileName);
    conf.setClass(SpillingQueue.SPILLBUFFER_MSGCLASS, Text.class, Writable.class);
    queue.init(conf, id);
    queue.prepareWrite();
    for (int i = 0; i < 1000; ++i) {
        queue.add(text);
    }
    assertEquals(queue.size(), 1000);
    queue.prepareRead();
    Text t;
    while ((t = queue.poll()) != null) {
        assertTrue(msg.equals(t.toString()));
    }
    assertEquals(queue.size(), 0);

    assertTrue(queue.poll() == null);

    assertTrue(file.exists());
    queue.close();
    assertFalse(file.exists());
}

From source file: org.apache.lens.cube.parse.TestCubeRewriter.java

License: Apache License

@Test
public void testQueryWithContinuousUpdatePeriod() throws Exception {
    Configuration conf = getConf();
    conf.set(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, "true");
    conf.setClass(CubeQueryConfUtil.TIME_RANGE_WRITER_CLASS, BetweenTimeRangeWriter.class,
            TimeRangeWriter.class);

    DateFormat qFmt = new SimpleDateFormat("yyyy-MM-dd-HH:mm:ss");
    String timeRangeString;
    timeRangeString = getTimeRangeString(DAILY, -2, 0, qFmt);
    CubeQueryContext rewrittenQuery = rewriteCtx("select SUM(msr15) from testCube where " + timeRangeString,
            conf);

    String to = getDateStringWithOffset(DAILY, 0, CONTINUOUS);
    String from = getDateStringWithOffset(DAILY, -2, CONTINUOUS);

    String expected = "select SUM((testCube.msr15)) from TestQueryRewrite.c0_testFact_CONTINUOUS testcube"
            + " WHERE ((( testcube . dt ) between  '" + from + "'  and  '" + to + "' ))";
    System.out.println("rewrittenQuery.toHQL() " + rewrittenQuery.toHQL());
    System.out.println("expected " + expected);
    compareQueries(rewrittenQuery.toHQL(), expected);

    //test with msr2 on different fact
    rewrittenQuery = rewriteCtx("select SUM(msr2) from testCube where " + timeRangeString, conf);
    expected = "select SUM((testCube.msr2)) from TestQueryRewrite.c0_testFact testcube"
            + " WHERE ((( testcube . dt ) between  '" + from + "'  and  '" + to + "' ))";
    System.out.println("rewrittenQuery.toHQL() " + rewrittenQuery.toHQL());
    System.out.println("expected " + expected);
    compareQueries(rewrittenQuery.toHQL(), expected);

    //from date 6 days back
    timeRangeString = getTimeRangeString(DAILY, -6, 0, qFmt);
    LensException th = getLensExceptionInRewrite("select SUM(msr15) from testCube where " + timeRangeString,
            getConf());
    assertEquals(th.getErrorCode(),
            LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
}