Example usage for org.apache.hadoop.conf Configuration setLong

List of usage examples for org.apache.hadoop.conf Configuration setLong

Introduction

On this page you can find example usages of org.apache.hadoop.conf.Configuration.setLong.

Prototype

public void setLong(String name, long value) 

Document

Set the value of the name property to a long.
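
For quick reference, here is a minimal, self-contained sketch of the round trip (the property name is made up for illustration): setLong stores the value as a string internally, and getLong parses it back, returning the supplied default when the key is absent.

import org.apache.hadoop.conf.Configuration;

public class SetLongExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration(false); // don't load default resources
        // Store a long under an illustrative property name.
        conf.setLong("example.timeout.ms", 30000L);
        // Read it back; the second argument is the default returned when the key is unset.
        System.out.println(conf.getLong("example.timeout.ms", 0L));   // 30000
        System.out.println(conf.getLong("example.missing.key", 42L)); // 42
    }
}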

Usage

From source file:com.blackberry.logdriver.util.GrepByTime.java

License:Apache License

@Override
public int run(String[] args) throws Exception {
    Configuration conf = getConf(); // Configuration processed by ToolRunner
    List<String> grepArgs = new ArrayList<String>();

    String regex = args[0];
    String dcNumber = args[1];
    String service = args[2];
    String component = args[3];
    long startTime = Long.parseLong(args[4]);
    long endTime = Long.parseLong(args[5]);
    String output = args[6];

    // Add the start and end time to the configuration
    conf.setLong("logdriver.grep.start.time", startTime);
    conf.setLong("logdriver.grep.end.time", endTime);

    // the first grep arg is regex
    grepArgs.add(regex);

    // Get paths
    FileManager fm = new FileManager(conf);
    List<PathInfo> paths = fm.getPathInfo(dcNumber, service, component, startTime, endTime);

    if (paths.isEmpty()) {
        System.err.println("No logs found for the given component(s) and time range.");
        return 1;
    }

    int retval = 99;
    try {
        // Lock, then get the real paths
        fm.acquireReadLocks(paths);
        for (PathInfo pi : paths) {
            LOG.info("Adding path: {}", pi.getFullPath());
            grepArgs.addAll(fm.getInputPaths(pi));
        }

        // The last arg is output directory
        grepArgs.add(output);

        // Now run Grep
        LOG.info("Sending args to Grep: {}", grepArgs);
        retval = ToolRunner.run(conf, new Grep(), grepArgs.toArray(new String[0]));
    } finally {
        fm.releaseReadLocks(paths);
    }

    return retval;
}
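
The two keys set above are meant to be read back on the task side with Configuration.getLong. A hypothetical mapper-side sketch follows (the actual Grep mapper may look different; the class name, defaults, and filtering step are assumptions):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical mapper: reads back the time range the driver stored with setLong.
public class TimeRangeMapper extends Mapper<LongWritable, Text, Text, NullWritable> {
    private long startTime;
    private long endTime;

    @Override
    protected void setup(Context context) {
        Configuration conf = context.getConfiguration();
        // Keys match the driver above; the defaults here are illustrative.
        startTime = conf.getLong("logdriver.grep.start.time", 0L);
        endTime = conf.getLong("logdriver.grep.end.time", Long.MAX_VALUE);
    }

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Emit only records whose timestamp falls within [startTime, endTime).
        // Timestamp extraction is omitted; it depends on the log format.
    }
}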

From source file:com.blackberry.logdriver.util.MultiSearch.java

License:Apache License

@Override
public int run(String[] args) throws Exception {
    Configuration conf = getConf(); // Configuration processed by ToolRunner
    // If run by Oozie, then load the Oozie conf too
    if (System.getProperty("oozie.action.conf.xml") != null) {
        conf.addResource(new URL("file://" + System.getProperty("oozie.action.conf.xml")));
    }

    FileSystem fs = FileSystem.get(conf);

    // The command line options
    String searchStringDir = null;
    List<Path> paths = new ArrayList<Path>();
    Path outputDir = null;

    // Load input files from the command line
    if (args.length < 3) {
        System.out.println("usage: [genericOptions] searchStringDirectory input [input ...] output");
        System.exit(1);
    }

    // Get the files we need from the command line.
    searchStringDir = args[0];
    // We are going to be reading all the files in this directory a lot. So
    // let's up the replication factor by a lot so that they're easy to read.
    for (FileStatus f : fs.listStatus(new Path(searchStringDir))) {
        fs.setReplication(f.getPath(), (short) 16);
    }

    for (int i = 1; i < args.length - 1; i++) {
        for (FileStatus f : fs.globStatus(new Path(args[i]))) {
            paths.add(f.getPath());
        }
    }

    outputDir = new Path(args[args.length - 1]);

    @SuppressWarnings("deprecation")
    Job job = new Job(conf);
    Configuration jobConf = job.getConfiguration();

    job.setJarByClass(MultiSearch.class);
    jobConf.setIfUnset("mapred.job.name", "MultiSearch");

    // To propagate credentials within Oozie
    if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
        jobConf.set("mapreduce.job.credentials.binary", System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
    }

    // Good output separators include things that are unsupported by XML. So we
    // just send the byte value of the character through. The restriction here
    // is that it can't be more than 1 byte when UTF-8 encoded, since it will be
    // read by Pig which only deals with single byte separators.
    {
        String outputSeparator = jobConf.get("logdriver.output.field.separator", DEFAULT_OUTPUT_SEPARATOR);
        byte[] bytes = outputSeparator.getBytes(UTF_8);
        if (bytes.length != 1) {
            LOG.error("The output separator must be a single byte in UTF-8.");
            return 1;
        }

        jobConf.set("logdriver.output.field.separator", Byte.toString(bytes[0]));
    }

    jobConf.set("logdriver.search.string.dir", searchStringDir);

    // This search is generally too fast to make good use of 128MB blocks, so
    // let's set the value to 256MB (if it's not set already)
    if (jobConf.get("mapred.max.split.size") == null) {
        jobConf.setLong("mapred.max.split.size", 256 * 1024 * 1024);
    }

    job.setInputFormatClass(AvroBlockInputFormat.class);
    job.setMapperClass(SearchMapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(NullWritable.class);

    job.setNumReduceTasks(0);

    job.setOutputFormatClass(TextOutputFormat.class);
    TextOutputFormat.setOutputPath(job, outputDir);
    for (Path path : paths) {
        AvroBlockInputFormat.addInputPath(job, path);
    }

    // Run the job.
    if (conf.getBoolean("job.wait", DEFAULT_WAIT_JOB)) {
        return job.waitForCompletion(true) ? 0 : 1;
    } else {
        job.submit();
        return 0;
    }
}
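
Configuration offers setIfUnset(String, String) but no long-typed variant, which is why the split-size default above is guarded by a null check. A small hypothetical helper (not part of the Hadoop API) captures that pattern:

import org.apache.hadoop.conf.Configuration;

// Hypothetical helper; not part of the Hadoop API.
public final class ConfUtils {
    private ConfUtils() {
    }

    public static void setLongIfUnset(Configuration conf, String key, long value) {
        if (conf.get(key) == null) {
            conf.setLong(key, value);
        }
    }
}

With it, the split-size block above would collapse to ConfUtils.setLongIfUnset(jobConf, "mapred.max.split.size", 256L * 1024 * 1024);.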

From source file:com.blackberry.logdriver.util.MultiSearchByTime.java

License:Apache License

@Override
public int run(String[] args) throws Exception {
    Configuration conf = getConf(); // Configuration processed by ToolRunner
    List<String> searchArgs = new ArrayList<String>();

    String searchStringDir = args[0];
    String dcNumber = args[1];
    String service = args[2];
    String component = args[3];
    long startTime = Long.parseLong(args[4]);
    long endTime = Long.parseLong(args[5]);
    String output = args[6];

    // Add the start and end time to the configuration
    conf.setLong("logdriver.search.start.time", startTime);
    conf.setLong("logdriver.search.end.time", endTime);

    // the first arg is the search string directory
    searchArgs.add(searchStringDir);

    // Get paths
    FileManager fm = new FileManager(conf);
    List<PathInfo> paths = fm.getPathInfo(dcNumber, service, component, startTime, endTime);

    if (paths.isEmpty()) {
        System.err.println("No logs found for the given component(s) and time range.");
        return 1;
    }

    int retval = 99;
    try {
        // Lock, then get the real paths
        fm.acquireReadLocks(paths);
        for (PathInfo pi : paths) {
            LOG.info("Adding path: {}", pi.getFullPath());
            searchArgs.addAll(fm.getInputPaths(pi));
        }

        // The last arg is output directory
        searchArgs.add(output);
        LOG.info("Sending args to MultiSearch: {}", searchArgs);
        retval = ToolRunner.run(conf, new MultiSearch(), searchArgs.toArray(new String[0]));
    } finally {
        fm.releaseReadLocks(paths);
    }

    return retval;
}

From source file:com.blackberry.logdriver.util.SearchByTime.java

License:Apache License

@Override
public int run(String[] args) throws Exception {
    Configuration conf = getConf(); // Configuration processed by ToolRunner
    List<String> searchArgs = new ArrayList<String>();

    String searchString = args[0];
    String dcNumber = args[1];
    String service = args[2];
    String component = args[3];
    long startTime = Long.parseLong(args[4]);
    long endTime = Long.parseLong(args[5]);
    String output = args[6];

    // Add the start and end time to the configuration
    conf.setLong("logdriver.search.start.time", startTime);
    conf.setLong("logdriver.search.end.time", endTime);

    // the first arg is the search string
    searchArgs.add(searchString);

    // Get paths
    FileManager fm = new FileManager(conf);
    List<PathInfo> paths = fm.getPathInfo(dcNumber, service, component, startTime, endTime);

    if (paths.isEmpty()) {
        System.err.println("No logs found for the given component(s) and time range.");
        return 1;
    }

    int retval = 99;
    try {
        // Lock, then get the real paths
        fm.acquireReadLocks(paths);
        for (PathInfo pi : paths) {
            LOG.info("Adding path: {}", pi.getFullPath());
            searchArgs.addAll(fm.getInputPaths(pi));
        }

        // The last arg is output directory
        searchArgs.add(output);

        // Now run Search
        LOG.info("Sending args to Search: {}", searchArgs);
        retval = ToolRunner.run(conf, new Search(), searchArgs.toArray(new String[0]));
    } finally {
        fm.releaseReadLocks(paths);
    }

    return retval;
}

From source file:com.cloudera.llama.am.cache.TestResourceCache.java

License:Apache License

private void testTimeoutEvictionPolicy(long timeout) throws Exception {
    ResourceCache.TimeoutEvictionPolicy ep = new ResourceCache.TimeoutEvictionPolicy();

    Configuration conf = new Configuration(false);
    if (timeout > 0) {
        conf.setLong(LlamaAM.EVICTION_IDLE_TIMEOUT_KEY, timeout);
    }
    ep.setConf(conf);

    long expected = (timeout == 0) ? LlamaAM.EVICTION_IDLE_TIMEOUT_DEFAULT : timeout;
    Assert.assertEquals(expected, ep.getTimeout());

    manualClock.set(1000);
    CacheRMResource cr = Mockito.mock(CacheRMResource.class);
    Mockito.when(cr.getCachedOn()).thenReturn(1000l);
    Assert.assertFalse(ep.shouldEvict(cr));
    manualClock.increment(ep.getTimeout() - 1);
    Assert.assertFalse(ep.shouldEvict(cr));
    manualClock.increment(1);
    Assert.assertTrue(ep.shouldEvict(cr));
    manualClock.increment(1);
    Assert.assertTrue(ep.shouldEvict(cr));
}
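
The timeout == 0 branch relies on the policy falling back to a default when the key was never set, presumably via getLong's default argument. A stand-alone sketch of that fallback behaviour (the key and values below are illustrative, not the real LlamaAM constants):

import org.apache.hadoop.conf.Configuration;

public class GetLongDefaultExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        // Key never set: getLong returns the supplied default.
        System.out.println(conf.getLong("eviction.idle.timeout.ms", 30000L)); // 30000
        // After setLong, the stored value wins over the default.
        conf.setLong("eviction.idle.timeout.ms", 5000L);
        System.out.println(conf.getLong("eviction.idle.timeout.ms", 30000L)); // 5000
    }
}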

From source file:com.cloudera.llama.am.impl.TestGangAntiDeadlockLlamaAM.java

License:Apache License

private Configuration createGangConfig() {
    Configuration conf = new Configuration(false);
    conf.setLong(LlamaAM.GANG_ANTI_DEADLOCK_NO_ALLOCATION_LIMIT_KEY, NO_ALLOCATION_LIMIT);
    conf.setLong(LlamaAM.GANG_ANTI_DEADLOCK_BACKOFF_PERCENT_KEY, BACKOFF_PERCENT);
    conf.setLong(LlamaAM.GANG_ANTI_DEADLOCK_BACKOFF_MIN_DELAY_KEY, BACKOFF_MIN_DELAY);
    conf.setLong(LlamaAM.GANG_ANTI_DEADLOCK_BACKOFF_MAX_DELAY_KEY, BACKOFF_MAX_DELAY);
    return conf;
}

From source file:com.datasalt.pangool.tuplemr.mapred.lib.output.TestTupleTextInputOutputFormat.java

License:Apache License

@Test
public void testSplits() throws Exception {

    BufferedWriter writer = new BufferedWriter(new FileWriter(IN));
    for (int i = 0; i < 10000; i++) {
        writer.write("str1" + " " + "str2" + " " + "30" + " " + "4000" + "\n");
    }
    writer.close();

    Schema schema = new Schema("schema", Fields.parse("a:string, b:string, c:int, d:long"));
    InputFormat inputFormat = new TupleTextInputFormat(schema, false, false, ' ',
            TupleTextInputFormat.NO_QUOTE_CHARACTER, TupleTextInputFormat.NO_ESCAPE_CHARACTER,
            FieldSelector.NONE, TupleTextInputFormat.NO_NULL_STRING);

    Configuration conf = getConf();
    conf.setLong("mapred.min.split.size", 10 * 1024);
    conf.setLong("dfs.block.size", 10 * 1024);
    conf.setLong("mapred.max.split.size", 10 * 1024);

    FileSystem fS = FileSystem.get(conf);
    Path outPath = new Path(OUT);

    MapOnlyJobBuilder mapOnly = new MapOnlyJobBuilder(conf);
    mapOnly.addInput(new Path(IN), inputFormat,
            new MapOnlyMapper<ITuple, NullWritable, NullWritable, NullWritable>() {

                protected void map(ITuple key, NullWritable value, Context context)
                        throws IOException, InterruptedException {
                    Assert.assertEquals("str1", key.get("a").toString());
                    Assert.assertEquals("str2", key.get("b").toString());
                    Assert.assertEquals((Integer) 30, (Integer) key.get("c"));
                    Assert.assertEquals((Long) 4000l, (Long) key.get("d"));
                    context.getCounter("stats", "nlines").increment(1);
                };
            });

    HadoopUtils.deleteIfExists(fS, outPath);
    mapOnly.setOutput(outPath, new HadoopOutputFormat(NullOutputFormat.class), NullWritable.class,
            NullWritable.class);
    Job job = mapOnly.createJob();
    try {
        assertTrue(job.waitForCompletion(true));
    } finally {
        mapOnly.cleanUpInstanceFiles();
    }

    HadoopUtils.deleteIfExists(fS, new Path(IN));

    assertEquals(10000, job.getCounters().getGroup("stats").findCounter("nlines").getValue());
}

From source file:com.facebook.hive.orc.OrcConf.java

License:Open Source License

public static void setLongVar(Configuration conf, ConfVars var, long val) {
    conf.setLong(var.varname, val);
}

From source file:com.facebook.hiveio.conf.LongConfOption.java

License:Apache License

/**
 * Set value for key
 * @param conf Configuration
 * @param value to set
 */
public void set(Configuration conf, long value) {
    conf.setLong(getKey(), value);
}

From source file:com.facebook.hiveio.conf.LongConfOption.java

License:Apache License

/**
 * Set value if it's not already present
 * @param conf Configuration
 * @param value to set
 */
public void setIfUnset(Configuration conf, long value) {
    if (conf.get(getKey()) == null) {
        conf.setLong(getKey(), value);
    }
}
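
Finally, a self-contained sketch of how set and setIfUnset above would typically be used together. LongConfOption's constructor is not shown in the source, so the stand-in class and key below are purely illustrative:

import org.apache.hadoop.conf.Configuration;

// Illustrative stand-in for the option-object pattern; only set/setIfUnset mirror the source.
public class LongOptionExample {
    static class LongOption {
        private final String key;

        LongOption(String key) {
            this.key = key;
        }

        String getKey() {
            return key;
        }

        void set(Configuration conf, long value) {
            conf.setLong(getKey(), value);
        }

        void setIfUnset(Configuration conf, long value) {
            if (conf.get(getKey()) == null) {
                conf.setLong(getKey(), value);
            }
        }
    }

    public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        LongOption opt = new LongOption("example.bytes.per.task");
        opt.setIfUnset(conf, 64L * 1024 * 1024);  // sets: the key is not present yet
        opt.set(conf, 128L * 1024 * 1024);        // overwrites unconditionally
        opt.setIfUnset(conf, 32L * 1024 * 1024);  // no-op: the key is already present
        System.out.println(conf.getLong("example.bytes.per.task", -1L)); // 134217728
    }
}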