Example usage for org.apache.hadoop.conf Configuration getInt

List of usage examples for org.apache.hadoop.conf Configuration getInt

Introduction

On this page you can find example usage for org.apache.hadoop.conf Configuration getInt.

Prototype

public int getInt(String name, int defaultValue) 

Document

Get the value of the name property as an int. If no such property exists, the provided default value is returned; if the specified value is not a valid int, an error is thrown.
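
For orientation, here is a minimal, self-contained sketch of getInt in action. The property name my.example.threads and the class name GetIntExample are hypothetical, chosen only for illustration:

import org.apache.hadoop.conf.Configuration;

public class GetIntExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // The property is not set, so getInt returns the supplied default (4).
        // "my.example.threads" is a hypothetical property name.
        int threads = conf.getInt("my.example.threads", 4);
        System.out.println("threads = " + threads); // prints "threads = 4"

        // Once the property has been set, getInt parses and returns the stored value.
        conf.setInt("my.example.threads", 8);
        System.out.println("threads = " + conf.getInt("my.example.threads", 4)); // prints "threads = 8"
    }
}

As the examples below show, this pattern is typically used to read tunables such as buffer sizes, line-length limits, and retry counts while keeping a sensible fallback for when the property is absent.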

Usage

From source file:com.taobao.adfs.file.FileRepository.java

License:Apache License

public FileRepository(Configuration conf) throws IOException {
    super(conf);
    nameMaxLength = conf.getInt("file.name.length.max", File.nameMaxLength);
    if (nameMaxLength > File.nameMaxLength) {
        Utilities.logInfo(logger, "file.name.length.max is ", nameMaxLength, ", larger than ",
                File.nameMaxLength, ", limiting it to ", File.nameMaxLength);
        nameMaxLength = File.nameMaxLength;
    }
    nextIdForFreeId.set(new Random().nextLong());
}

From source file:com.tdunning.plume.local.lazy.MapRedExecutor.java

License:Apache License

/**
 * Builds the execution tree out of a {@link PlumeWorkflow} by reading its class name from the Hadoop
 * configuration. It then identifies which MSCR step is being executed by looking at the mscr_id
 * parameter and returns that MSCR.
 *
 * @param conf the Hadoop configuration carrying the workflow class name and the MSCR id
 * @return the MSCR whose id matches the mscr_id parameter
 */
static MSCR readMSCR(Configuration conf) {
    String className = conf.get(MapRedExecutor.WORKFLOW_NAME);
    int id = conf.getInt(MapRedExecutor.MSCR_ID, -1);
    if (id == -1) {
        throw new RuntimeException("No MSCR ID in hadoop conf.");
    }
    try {
        PlumeWorkflow workFlow = (PlumeWorkflow) Class.forName(className).newInstance();
        Optimizer optimizer = new Optimizer();
        ExecutionStep step = optimizer.optimize(workFlow);
        do {
            for (MSCR mscr : step.mscrSteps) {
                if (mscr.getId() == id) {
                    return mscr;
                }
            }
            step = step.nextStep;
        } while (step != null);
        throw new RuntimeException("Invalid MSCR ID in hadoop conf: " + id);
    } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
        throw new RuntimeException(e);
    }
}

From source file:com.tgam.hadoop.mapred.EscapedLineRecordReader.java

License:Apache License

public EscapedLineRecordReader(Configuration job, FileSplit split) throws IOException {
    this.maxLineLength = job.getInt("mapred.linerecordreader.maxlength", Integer.MAX_VALUE);
    start = split.getStart();
    end = start + split.getLength();
    final Path file = split.getPath();
    compressionCodecs = new CompressionCodecFactory(job);
    final CompressionCodec codec = compressionCodecs.getCodec(file);

    // open the file and seek to the start of the split
    FileSystem fs = file.getFileSystem(job);
    FSDataInputStream fileIn = fs.open(split.getPath());
    boolean skipFirstLine = false;
    if (codec != null) {
        in = new LineReader(codec.createInputStream(fileIn), job);
        end = Long.MAX_VALUE;
    } else {
        if (start != 0) {
            skipFirstLine = true;
            --start;
            fileIn.seek(start);
        }
        in = new LineReader(fileIn, job);
    }
    if (skipFirstLine) { // skip first line and re-establish "start".
        start += in.readLine(new Text(), 0, (int) Math.min((long) Integer.MAX_VALUE, end - start));
    }
    this.pos = start;
}

From source file:com.tgam.hadoop.mapred.EscapedLineRecordReader.java

License:Apache License

public EscapedLineRecordReader(InputStream in, long offset, long endOffset, Configuration job)
        throws IOException {
    this.maxLineLength = job.getInt("mapred.linerecordreader.maxlength", Integer.MAX_VALUE);
    this.in = new LineReader(in, job);
    this.start = offset;
    this.pos = offset;
    this.end = endOffset;
}

From source file:com.tgam.hadoop.util.GenericEscapedLineReader.java

License:Apache License

/**
 * Create a line reader that reads from the given stream using the
 * <code>io.file.buffer.size</code> specified in the given
 * <code>Configuration</code>.
 * @param in input stream
 * @param conf configuration
 * @throws IOException
 */
public GenericEscapedLineReader(InputStream in, Configuration conf) throws IOException {
    this(in, conf.getInt("io.file.buffer.size", DEFAULT_BUFFER_SIZE));
}

From source file:com.thinkbiganalytics.inputformat.hadoop.mapred.EscapedLineReader.java

License:Open Source License

/**
 * Create a multi-line reader that reads from the given stream using the
 * <code>io.file.buffer.size</code> specified in the given
 * <code>Configuration</code>.
 *
 * @param in   input stream
 * @param conf configuration
 */
public EscapedLineReader(InputStream in, Configuration conf) throws IOException {
    this(in, conf.getInt("io.file.buffer.size", DEFAULT_BUFFER_SIZE), DEFAULT_ESCAPE_CHARACTER);
}

From source file:com.thinkbiganalytics.inputformat.hadoop.mapred.EscapedLineReader.java

License:Open Source License

public EscapedLineReader(InputStream in, Configuration conf, byte escapeChar) throws IOException {
    this(in, conf.getInt("io.file.buffer.size", DEFAULT_BUFFER_SIZE), escapeChar);
}

From source file:com.thinkbiganalytics.inputformat.hadoop.mapred.OmnitureDataFileRecordReader.java

License:Open Source License

public OmnitureDataFileRecordReader(Configuration job, FileSplit split) throws IOException {

    this.maxLineLength = job.getInt("mapred.escapedlinereader.maxlength", Integer.MAX_VALUE);
    this.start = split.getStart();
    this.end = start + split.getLength();
    final Path file = split.getPath();
    this.compressionCodecs = new CompressionCodecFactory(job);
    final CompressionCodec codec = compressionCodecs.getCodec(file);

    // Open the file and seek to the start of the split
    FileSystem fs = file.getFileSystem(job);
    FSDataInputStream fileIn = fs.open(split.getPath());
    boolean skipFirstLine = false;
    if (codec != null) {
        lineReader = new EscapedLineReader(codec.createInputStream(fileIn), job);
        end = Long.MAX_VALUE;
    } else {
        if (start != 0) {
            skipFirstLine = true;
            --start;
            fileIn.seek(start);
        }
        lineReader = new EscapedLineReader(fileIn, job);
    }
    if (skipFirstLine) {
        start += lineReader.readLine(new Text(), 0, (int) Math.min((long) Integer.MAX_VALUE, end - start));
    }
    this.pos = start;
}

From source file:com.thinkbiganalytics.kylo.catalog.azure.AzureNativeFileSystemProvider.java

License:Apache License

@Nonnull
private CloudBlobClient createBlobClient(@Nonnull final URI uri, @Nonnull final Configuration conf) {
    // Determine endpoint
    final String httpScheme = StringUtils.equalsAnyIgnoreCase(uri.getScheme(), "asvs", "wasbs") ? "https"
            : "http";
    final URI blobEndPoint = URI.create(httpScheme + "://" + uri.getRawAuthority());

    // Create client
    final CloudBlobClient client = new CloudBlobClient(blobEndPoint, getCredentials(uri, conf));

    final RetryPolicyFactory retryPolicyFactory = new RetryExponentialRetry(
            conf.getInt("fs.azure.io.retry.min.backoff.interval", 3 * 1000 /* 1s */),
            conf.getInt("fs.azure.io.retry.backoff.interval", 3 * 1000 /* 1s */),
            conf.getInt("fs.azure.io.retry.max.backoff.interval", 30 * 1000 /* 30s */),
            conf.getInt("fs.azure.io.retry.max.retries", 30));
    client.getDefaultRequestOptions().setRetryPolicyFactory(retryPolicyFactory);

    final int storageConnectionTimeout = conf.getInt("fs.azure.storage.timeout", 0);
    if (storageConnectionTimeout > 0) {
        client.getDefaultRequestOptions().setTimeoutIntervalInMs(storageConnectionTimeout * 1000);
    }

    return client;
}

From source file:com.tomslabs.grid.avro.AvroFileOutputFormat.java

License:Apache License

@Override
public RecordWriter<T, Object> getRecordWriter(TaskAttemptContext context)
        throws IOException, InterruptedException {
    Configuration config = context.getConfiguration();

    Schema schema = getWriteSchema(config);
    DatumWriter<T> datumWriter = getDatumWriter(config);

    final DataFileWriter<T> writer = new DataFileWriter<T>(datumWriter);

    if (getCompressOutput(context)) {
        int level = config.getInt(DEFLATE_LEVEL_KEY, DEFAULT_DEFLATE_LEVEL);
        writer.setCodec(CodecFactory.deflateCodec(level));
    }

    Path file = getDefaultWorkFile(context, EXT);
    FileSystem fs = file.getFileSystem(config);

    writer.create(schema, fs.create(file));

    return new AvroRecordWriter<T>(writer);
}