Example usage for org.apache.hadoop.conf.Configuration.set

List of usage examples for org.apache.hadoop.conf.Configuration.set

Introduction

On this page you can find example usages of org.apache.hadoop.conf.Configuration.set.

Prototype

public void set(String name, String value) 

Document

Set the value of the name property.
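
For orientation, here is a minimal, self-contained sketch of set paired with get. The property names below are made up for illustration and are not standard Hadoop keys.

import org.apache.hadoop.conf.Configuration;

public class ConfigurationSetExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // set stores a string value under the given property name;
        // setting the same name again overwrites the earlier value.
        conf.set("example.greeting", "hello");
        System.out.println(conf.get("example.greeting"));            // prints "hello"
        System.out.println(conf.get("example.missing", "fallback")); // get with a default for unset names
    }
}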

Usage

From source file: com.cloudera.crunch.impl.mr.run.CrunchInputs.java

License: Apache License

public static void addInputPath(Job job, Path path, Class<? extends InputFormat> inputFormatClass,
        int nodeIndex) {
    Configuration conf = job.getConfiguration();
    String inputs = JOINER.join(inputFormatClass.getName(), nodeIndex, path.toString());
    String existing = conf.get(RuntimeParameters.MULTI_INPUTS);
    conf.set(RuntimeParameters.MULTI_INPUTS, existing == null ? inputs : existing + RECORD_SEP + inputs);
}

From source file: com.cloudera.crunch.impl.mr.run.RTNodeSerializer.java

License: Open Source License

public void serialize(List<DoNode> nodes, Configuration conf, NodeContext context) throws IOException {
    List<RTNode> rtNodes = Lists.newArrayList();
    for (DoNode node : nodes) {
        rtNodes.add(node.toRTNode(true, conf, context));
    }
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ObjectOutputStream oos = new ObjectOutputStream(baos);
    oos.writeObject(rtNodes);
    String serialized = Base64.encodeBase64String(baos.toByteArray());
    conf.set(context.getConfigurationKey(), serialized);
}

From source file: com.cloudera.crunch.io.avro.AvroFileSourceTarget.java

License: Open Source License

@Override
public void configureSource(Job job, int inputId) throws IOException {
    SourceTargetHelper.configureSource(job, inputId, AvroInputFormat.class, path);

    Configuration conf = job.getConfiguration();
    String inputSchema = conf.get("avro.input.schema");
    if (inputSchema == null) {
        conf.set("avro.input.schema", ptype.getSchema().toString());
    } else if (!inputSchema.equals(ptype.getSchema().toString())) {
        throw new IllegalStateException("Multiple Avro sources must use the same schema");
    }
}

From source file: com.cloudera.crunch.io.avro.AvroFileTarget.java

License: Open Source License

@Override
public void configureForMapReduce(Job job, PType<?> ptype, Path outputPath, String name) {
    AvroType<?> atype = (AvroType<?>) ptype;
    Configuration conf = job.getConfiguration();
    String outputSchema = conf.get("avro.output.schema");
    if (outputSchema == null) {
        conf.set("avro.output.schema", atype.getSchema().toString());
    } else if (!outputSchema.equals(atype.getSchema().toString())) {
        throw new IllegalStateException("Avro targets must use the same output schema");
    }

    SourceTargetHelper.configureTarget(job, AvroOutputFormat.class, ptype.getDataBridge(), outputPath, name);
}

From source file: com.cloudera.crunch.io.hbase.HBaseSourceTarget.java

License: Open Source License

@Override
public void configureSource(Job job, int inputId) throws IOException {
    Configuration conf = job.getConfiguration();
    job.setInputFormatClass(TableInputFormat.class);
    job.setMapperClass(CrunchMapper.class);
    HBaseConfiguration.addHbaseResources(conf);
    conf.set(TableInputFormat.INPUT_TABLE, table);
    conf.set(TableInputFormat.SCAN, convertScanToString(scan));
    TableMapReduceUtil.addDependencyJars(job);
}

From source file: com.cloudera.crunch.io.hbase.HBaseTarget.java

License: Open Source License

@Override
public void configureForMapReduce(Job job, PType<?> ptype, Path outputPath, String name) {
    Configuration conf = job.getConfiguration();
    HBaseConfiguration.addHbaseResources(conf);
    job.setOutputFormatClass(TableOutputFormat.class);
    conf.set(TableOutputFormat.OUTPUT_TABLE, table);
    try {
        TableMapReduceUtil.addDependencyJars(job);
    } catch (IOException e) {
        throw new CrunchRuntimeException(e);
    }
}

From source file: com.cloudera.crunch.lib.Sort.java

License: Open Source License

private static GroupingOptions buildGroupingOptions(Configuration conf, PTypeFamily tf, PType ptype,
        Order order) {
    Builder builder = GroupingOptions.builder();
    if (order == Order.DESCENDING) {
        if (tf == WritableTypeFamily.getInstance()) {
            builder.sortComparatorClass(ReverseWritableComparator.class);
        } else if (tf == AvroTypeFamily.getInstance()) {
            AvroType avroType = (AvroType) ptype;
            Schema schema = avroType.getSchema();
            conf.set("crunch.schema", schema.toString());
            builder.sortComparatorClass(ReverseAvroComparator.class);
        } else {
            throw new RuntimeException("Unrecognized type family: " + tf);
        }
    }
    return builder.build();
}

From source file: com.cloudera.crunch.WordCountHBaseTest.java

License: Open Source License

@Before
public void setUp() throws Exception {
    Configuration conf = hbaseTestUtil.getConfiguration();
    File tmpDir = File.createTempFile("logdir", "");
    tmpDir.delete();
    tmpDir.mkdir();
    tmpDir.deleteOnExit();
    conf.set("hadoop.log.dir", tmpDir.getAbsolutePath());
    conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
    conf.setInt("hbase.master.info.port", -1);
    conf.setInt("hbase.regionserver.info.port", -1);
    hbaseTestUtil.startMiniZKCluster();
    hbaseTestUtil.startMiniCluster();
    hbaseTestUtil.startMiniMapReduceCluster();
}

From source file: com.cloudera.data.filesystem.TestFileSystemDatasetRepository.java

License: Apache License

@Before
public void setUp() throws IOException {
    Configuration conf = new Configuration();

    conf.set("fs.default.name", "file:///");

    fileSystem = FileSystem.get(conf);
    testDirectory = new Path(Files.createTempDir().getAbsolutePath());
    repo = new FileSystemDatasetRepository(fileSystem, testDirectory);

    testSchema = Schema.createRecord("Test", "Test record schema", "com.cloudera.data.filesystem", false);
    testSchema.setFields(Lists.newArrayList(new Field("name", Schema.create(Type.STRING), null, null)));
}

From source file: com.cloudera.data.filesystem.TestFileSystemDatasetWriter.java

License: Apache License

@Before
public void setUp() throws IOException {
    testDirectory = Files.createTempDir();

    Configuration conf = new Configuration();

    conf.set("fs.default.name", "file:///");
    fileSystem = FileSystem.get(conf);
}