Example usage for org.apache.hadoop.conf Configuration getBoolean

List of usage examples for org.apache.hadoop.conf Configuration getBoolean

Introduction

On this page you can find example usages of org.apache.hadoop.conf Configuration getBoolean.

Prototype

public boolean getBoolean(String name, boolean defaultValue) 

Document

Get the value of the name property as a boolean.
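
A minimal sketch of the lookup semantics, using a hypothetical property name: the parsed value is returned when the property is set to a valid boolean string ("true" or "false", case-insensitive), and defaultValue is returned when the property is absent or malformed.

Configuration conf = new Configuration();
conf.set("my.feature.enabled", "true");

// Property set to a valid boolean string: the parsed value wins.
boolean enabled = conf.getBoolean("my.feature.enabled", false); // true

// Property absent: the supplied default is returned instead.
boolean fallback = conf.getBoolean("my.other.flag", true); // true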

Usage

From source file:org.apache.carbondata.processing.loading.jsoninput.JsonInputFormat.java

License:Apache License

/**
 * Gets if this is configured as one JSON record per line.
 *
 * @param conf the Job configuration
 * @return True if one JSON record per line, false otherwise.
 */
public static boolean getOneRecordPerLine(Configuration conf) {
    return conf.getBoolean(ONE_RECORD_PER_LINE, false);
}
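
For reference, a short round-trip sketch (assuming the ONE_RECORD_PER_LINE key used above is a publicly visible constant on JsonInputFormat):

Configuration conf = new Configuration();
conf.setBoolean(JsonInputFormat.ONE_RECORD_PER_LINE, true);
boolean oneRecordPerLine = JsonInputFormat.getOneRecordPerLine(conf); // true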

From source file:org.apache.cassandra.hadoop.cql3.CqlBulkOutputFormat.java

License:Apache License

public static boolean getDeleteSourceOnSuccess(Configuration conf) {
    return conf.getBoolean(DELETE_SOURCE, false);
}

From source file:org.apache.cassandra.hadoop.hive.metastore.CassandraClientHolder.java

License:Apache License

public CassandraClientHolder(Configuration conf) {
    this.keyspaceName = conf.get(CONF_PARAM_KEYSPACE_NAME, DEF_META_STORE_KEYSPACE);
    this.columnFamily = conf.get(CONF_PARAM_CF_NAME, DEF_META_STORE_CF);
    this.readCl = ConsistencyLevel
            .findByValue(conf.getInt(CONF_PARAM_READ_CL, ConsistencyLevel.QUORUM.getValue()));
    this.writeCl = ConsistencyLevel
            .findByValue(conf.getInt(CONF_PARAM_WRITE_CL, ConsistencyLevel.QUORUM.getValue()));
    try {
        client = CassandraProxyClient.newProxyConnection(conf.get(CONF_PARAM_HOST, "localhost"),
                conf.getInt(CONF_PARAM_PORT, 9160), conf.getBoolean(CONF_PARAM_FRAMED, true),
                ConnectionStrategy.valueOf(conf.get(CONF_PARAM_CONNECTION_STRATEGY, "STICKY")));

    } catch (IOException ioe) {
        throw new CassandraHiveMetaStoreException("Could not connect to Cassandra. Reason: " + ioe.getMessage(),
                ioe);
    }
}
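
Of the examples on this page, this is the only one that defaults getBoolean to true: the framed Thrift transport is assumed to be on unless the configuration explicitly disables it, mirroring the fallbacks for the host ("localhost"), port (9160), and QUORUM consistency levels above.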

From source file:org.apache.crunch.impl.mr.plan.DotfileUtil.java

License:Apache License

/**
 * Determine if the creation of debugging dotfiles (which explain various stages in the job planning process)
 * is enabled.
 * <p/>
 * In order for this to be <tt>true</tt>, {@link #setPipelineDotfileOutputDir(Configuration, String)} needs to also
 * have been called with the same configuration object.
 * <p/>
 * Note that regardless of whether or not debugging dotfile creation is enabled, the high-level job plan will always
 * be dumped if {@link #setPipelineDotfileOutputDir(Configuration, String)} has been called.
 *
 * @param conf pipeline configuration
 * @return <tt>true</tt> if the creation of debugging dotfiles is enabled, otherwise <tt>false</tt>
 */
public static boolean isDebugDotfilesEnabled(Configuration conf) {
    return conf.getBoolean(PlanningParameters.DEBUG_DOTFILES_ENABLED, false)
            && conf.get(PlanningParameters.PIPELINE_DOTFILE_OUTPUT_DIR) != null;
}
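
A brief usage sketch (the directory path is illustrative, and setPipelineDotfileOutputDir is assumed to be a static helper on the same class, as the javadoc suggests): both conditions must hold on the same Configuration for the check to pass.

Configuration conf = new Configuration();
conf.setBoolean(PlanningParameters.DEBUG_DOTFILES_ENABLED, true);
// Without this second call, isDebugDotfilesEnabled still returns false:
DotfileUtil.setPipelineDotfileOutputDir(conf, "/tmp/crunch-dotfiles");
boolean enabled = DotfileUtil.isDebugDotfilesEnabled(conf); // true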

From source file:org.apache.crunch.impl.mr.run.CrunchInputFormat.java

License:Apache License

@Override
public List<InputSplit> getSplits(JobContext job) throws IOException, InterruptedException {
    List<InputSplit> splits = Lists.newArrayList();
    Configuration base = job.getConfiguration();
    Map<FormatBundle, Map<Integer, List<Path>>> formatNodeMap = CrunchInputs.getFormatNodeMap(job);

    // First, build a map of InputFormats to Paths
    for (Map.Entry<FormatBundle, Map<Integer, List<Path>>> entry : formatNodeMap.entrySet()) {
        FormatBundle inputBundle = entry.getKey();
        Configuration conf = new Configuration(base);
        inputBundle.configure(conf);
        Job jobCopy = new Job(conf);
        InputFormat<?, ?> format = (InputFormat<?, ?>) ReflectionUtils.newInstance(inputBundle.getFormatClass(),
                jobCopy.getConfiguration());
        if (format instanceof FileInputFormat
                && !conf.getBoolean(RuntimeParameters.DISABLE_COMBINE_FILE, false)) {
            format = new CrunchCombineFileInputFormat<Object, Object>(job);
        }
        for (Map.Entry<Integer, List<Path>> nodeEntry : entry.getValue().entrySet()) {
            Integer nodeIndex = nodeEntry.getKey();
            List<Path> paths = nodeEntry.getValue();
            FileInputFormat.setInputPaths(jobCopy, paths.toArray(new Path[paths.size()]));

            // Get splits for each input path and tag with InputFormat
            // and Mapper types by wrapping in a TaggedInputSplit.
            List<InputSplit> pathSplits = format.getSplits(jobCopy);
            for (InputSplit pathSplit : pathSplits) {
                splits.add(new CrunchInputSplit(pathSplit, inputBundle, nodeIndex, conf));
            }
        }
    }
    return splits;
}
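
Here getBoolean serves as an opt-out switch: every FileInputFormat gets wrapped in a combine-file variant unless RuntimeParameters.DISABLE_COMBINE_FILE is explicitly set to true, so the false default keeps the combining behavior on.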

From source file:org.apache.crunch.impl.mr.run.CrunchTaskContext.java

License:Apache License

public boolean isDebugRun() {
    Configuration conf = taskContext.getConfiguration();
    return conf.getBoolean(RuntimeParameters.DEBUG, false);
}

From source file:org.apache.crunch.impl.mr.run.RTNode.java

License:Apache License

public void initialize(CrunchTaskContext ctxt) {
    if (emitter != null) {
        // Already initialized
        return;
    }
    fn.setContext(ctxt.getContext());
    fn.initialize();
    for (RTNode child : children) {
        child.initialize(ctxt);
    }

    if (outputConverter != null) {
        if (outputName != null) {
            this.emitter = new MultipleOutputEmitter(outputConverter, ctxt.getMultipleOutputs(), outputName);
        } else {
            this.emitter = new OutputEmitter(outputConverter, ctxt.getContext());
        }
    } else if (!children.isEmpty()) {
        Configuration conf = ctxt.getContext().getConfiguration();
        boolean disableDeepCopy = conf.getBoolean(RuntimeParameters.DISABLE_DEEP_COPY, false);
        this.emitter = new IntermediateEmitter(outputPType, children, conf,
                disableDeepCopy || fn.disableDeepCopy());
    } else {
        throw new CrunchRuntimeException("Invalid RTNode config: no emitter for: " + nodeName);
    }
}
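
In this example the configuration flag is OR-ed with a per-function override: deep copying of intermediate values is skipped when either RuntimeParameters.DISABLE_DEEP_COPY is true or the DoFn itself reports disableDeepCopy().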

From source file:org.apache.crunch.io.hbase.HFileOutputFormatForCrunch.java

License:Apache License

@Override
public RecordWriter<Object, KeyValue> getRecordWriter(final TaskAttemptContext context)
        throws IOException, InterruptedException {
    Path outputPath = getDefaultWorkFile(context, "");
    Configuration conf = context.getConfiguration();
    FileSystem fs = outputPath.getFileSystem(conf);

    final boolean compactionExclude = conf.getBoolean(COMPACTION_EXCLUDE_CONF_KEY, false);

    String hcolStr = conf.get(HCOLUMN_DESCRIPTOR_KEY);
    if (hcolStr == null) {
        throw new AssertionError(HCOLUMN_DESCRIPTOR_KEY + " is not set in conf");
    }
    byte[] hcolBytes;
    try {
        hcolBytes = Hex.decodeHex(hcolStr.toCharArray());
    } catch (DecoderException e) {
        throw new AssertionError("Bad hex string: " + hcolStr);
    }
    HColumnDescriptor hcol = new HColumnDescriptor();
    hcol.readFields(new DataInputStream(new ByteArrayInputStream(hcolBytes)));
    LOG.info("Output path: " + outputPath);
    LOG.info("HColumnDescriptor: " + hcol.toString());
    final HFile.Writer writer = HFile.getWriterFactoryNoCache(conf).withPath(fs, outputPath)
            .withBlockSize(hcol.getBlocksize()).withCompression(hcol.getCompression())
            .withComparator(KeyValue.KEY_COMPARATOR)
            .withDataBlockEncoder(new HFileDataBlockEncoderImpl(hcol.getDataBlockEncoding()))
            .withChecksumType(Store.getChecksumType(conf)).withBytesPerChecksum(Store.getBytesPerChecksum(conf))
            .create();

    return new RecordWriter<Object, KeyValue>() {
        @Override
        public void write(Object row, KeyValue kv) throws IOException {
            if (kv.getTimestamp() == HConstants.LATEST_TIMESTAMP) {
                kv.updateLatestStamp(now);
            }
            writer.append(kv);
            trt.includeTimestamp(kv);
        }

        @Override
        public void close(TaskAttemptContext c) throws IOException, InterruptedException {
            writer.appendFileInfo(StoreFile.BULKLOAD_TIME_KEY, Bytes.toBytes(System.currentTimeMillis()));
            writer.appendFileInfo(StoreFile.BULKLOAD_TASK_KEY,
                    Bytes.toBytes(context.getTaskAttemptID().toString()));
            writer.appendFileInfo(StoreFile.MAJOR_COMPACTION_KEY, Bytes.toBytes(true));
            writer.appendFileInfo(StoreFile.EXCLUDE_FROM_MINOR_COMPACTION_KEY,
                    Bytes.toBytes(compactionExclude));
            writer.appendFileInfo(StoreFile.TIMERANGE_KEY, WritableUtils.toByteArray(trt));
            writer.close();
        }
    };
}
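
Note that now and trt (a TimeRangeTracker) used inside the RecordWriter are fields of the enclosing class and are not shown in this excerpt.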

From source file:org.apache.crunch.io.hbase.HFileReaderFactory.java

License:Apache License

@Override
public Iterator<KeyValue> read(FileSystem fs, Path path) {
    Configuration conf = fs.getConf();
    CacheConfig cacheConfig = new CacheConfig(conf);
    try {
        HFile.Reader hfr = HFile.createReader(fs, path, cacheConfig);
        HFileScanner scanner = hfr.getScanner(conf.getBoolean(HFILE_SCANNER_CACHE_BLOCKS, false),
                conf.getBoolean(HFILE_SCANNER_PREAD, false));
        scanner.seekTo();
        return new HFileIterator(scanner);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
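
The two flags map onto the parameters of HFile.Reader's getScanner(boolean cacheBlocks, boolean pread): the first controls whether blocks read by the scanner populate the block cache, and the second selects positional (pread) over streaming reads; both default to off here.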

From source file:org.apache.crunch.types.avro.SafeAvroSerialization.java

License:Apache License

/**
 * Returns the specified map output deserializer. Defaults to the final output
 * deserializer if no map output schema was specified.
 */
public Deserializer<AvroWrapper<T>> getDeserializer(Class<AvroWrapper<T>> c) {
    boolean isKey = AvroKey.class.isAssignableFrom(c);
    Configuration conf = getConf();
    Schema schema = isKey ? Pair.getKeySchema(AvroJob.getMapOutputSchema(conf))
            : Pair.getValueSchema(AvroJob.getMapOutputSchema(conf));

    DatumReader<T> datumReader = null;
    if (conf.getBoolean(AvroJob.MAP_OUTPUT_IS_REFLECT, false)) {
        ReflectDataFactory factory = (ReflectDataFactory) ReflectionUtils
                .newInstance(conf.getClass("crunch.reflectdatafactory", ReflectDataFactory.class), conf);
        datumReader = factory.getReader(schema);
    } else {
        datumReader = new SpecificDatumReader<T>(schema);
    }
    return new AvroWrapperDeserializer(datumReader, isKey);
}
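
The AvroJob.MAP_OUTPUT_IS_REFLECT flag chooses the deserialization strategy: when true, the reader comes from a ReflectDataFactory (optionally overridden via the crunch.reflectdatafactory key); otherwise a SpecificDatumReader is used.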