Example usage for org.apache.hadoop.conf Configuration get

Introduction

This page lists example usages of org.apache.hadoop.conf.Configuration.get(String), collected from real source files.

Prototype

public String get(String name) 

Document

Get the value of the name property, or null if no such property exists.
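
For orientation before the project examples below, here is a minimal, self-contained sketch of the call; the com.example.* property names are invented for illustration. get(String) returns null for a key that has not been set, returns the stored value once it is set, and expands ${...} references against other properties.

import org.apache.hadoop.conf.Configuration;

public class ConfigurationGetExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Unset property: get(String) returns null.
        System.out.println(conf.get("com.example.greeting")); // null

        // Once the property is set, get(String) returns its value.
        conf.set("com.example.greeting", "hello");
        System.out.println(conf.get("com.example.greeting")); // hello

        // Values are processed for variable expansion against other properties.
        conf.set("com.example.base", "/tmp");
        conf.set("com.example.path", "${com.example.base}/data");
        System.out.println(conf.get("com.example.path")); // /tmp/data
    }
}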

Usage

From source file:com.asakusafw.runtime.stage.inprocess.InProcessStageConfiguratorTest.java

License:Apache License

/**
 * activate alternative properties.
 * @throws Exception if failed
 */
@Test
public void activate_properties() throws Exception {
    Job job = newJob();
    Configuration conf = job.getConfiguration();

    conf.setLong(KEY_LIMIT, 100);
    conf.set(KEY_PREFIX_REPLACE + "com.example.testing", "YES!");
    new Mock(100).configure(job);
    assertThat(conf.get("com.example.testing"), is("YES!"));
}

From source file:com.asakusafw.runtime.stage.inprocess.InProcessStageConfiguratorTest.java

License:Apache License

/**
 * activate alternative properties.
 * @throws Exception if failed
 */
@Test
public void activate_properties_skip() throws Exception {
    Job job = newJob();
    Configuration conf = job.getConfiguration();

    conf.setLong(KEY_LIMIT, 100);
    conf.set(KEY_PREFIX_REPLACE + "com.example.testing", "YES!");
    new Mock(1000).configure(job);
    assertThat(conf.get("com.example.testing"), is(not("YES!")));
}

From source file:com.asakusafw.runtime.stage.optimizer.LibraryCopySuppressionConfigurator.java

License:Apache License

@Override
public void configure(Job job) throws IOException, InterruptedException {
    Configuration conf = job.getConfiguration();
    if (conf.getBoolean(KEY_ENABLED, DEFAULT_ENABLED) == false) {
        return;
    }
    // activates only if application launcher is used
    if (conf.getBoolean(ApplicationLauncher.KEY_LAUNCHER_USED, false) == false) {
        return;
    }
    if (JobCompatibility.isLocalMode(job) == false) {
        return;
    }
    String libraries = conf.get(KEY_CONF_LIBRARIES);
    if (libraries == null || libraries.isEmpty()) {
        return;
    }
    Set<String> loaded = new HashSet<>();
    ClassLoader loader = conf.getClassLoader();
    if (loader instanceof URLClassLoader) {
        for (URL url : ((URLClassLoader) loader).getURLs()) {
            try {
                loaded.add(url.toURI().toString());
            } catch (URISyntaxException e) {
                LOG.warn(MessageFormat.format("Failed to analyze classpath: {0}", url));
            }
        }
    }
    if (loaded.isEmpty()) {
        return;
    }
    StringBuilder result = new StringBuilder();
    for (String library : libraries.split(",")) { //$NON-NLS-1$
        if (loaded.contains(library)) {
            if (LOG.isDebugEnabled()) {
                LOG.debug(MessageFormat.format("Keep library: {0}", library)); //$NON-NLS-1$
            }
        } else {
            if (LOG.isDebugEnabled()) {
                LOG.debug(MessageFormat.format("Suppress library: {0}", library)); //$NON-NLS-1$
            }
            if (result.length() != 0) {
                result.append(',');
            }
            result.append(library);
        }
    }
    if (result.length() > 0) {
        conf.set(KEY_CONF_LIBRARIES, result.toString());
    } else {
        if (CONFIGURATION_UNSET != null) {
            try {
                CONFIGURATION_UNSET.invoke(conf, KEY_CONF_LIBRARIES);
                return;
            } catch (Exception e) {
                LOG.warn(MessageFormat.format("Failed to invoke {0}", CONFIGURATION_UNSET), e);
            }
        }
        String newLibraries = selectLibraries(libraries);
        conf.set(KEY_CONF_LIBRARIES, newLibraries);
    }
}

From source file:com.asakusafw.runtime.stage.resource.StageResourceDriver.java

License:Apache License

/**
 * Creates a new instance.
 * @param configuration the current configuration
 * @throws IOException if failed to initialize this driver
 * @throws IllegalArgumentException if the parameter is {@code null}
 */
public StageResourceDriver(Configuration configuration) throws IOException {
    if (configuration == null) {
        throw new IllegalArgumentException("configuration must not be null"); //$NON-NLS-1$
    }
    this.configuration = configuration;
    this.localFileSystem = FileSystem.getLocal(configuration);
    this.accessMode = AccessMode.decode(configuration.get(KEY_ACCESS_MODE));
}

From source file:com.asakusafw.testdriver.inprocess.InProcessJobExecutorTest.java

License:Apache License

/**
 * Test method for executing Hadoop job w/ properties.
 */
@Test
public void executeJob_w_properties() {
    prepareJobflow();
    final AtomicBoolean call = new AtomicBoolean();
    MockHadoopJob.callback(new MockHadoopJob.Callback() {
        @Override
        public int run(String[] args, Configuration conf) {
            call.set(true);
            assertThat(conf.get("com.example.testing"), is("true"));
            return 0;
        }
    });

    TestExecutionPlan.Job job = job(MockHadoopJob.class.getName(), "com.example.testing", "true");

    JobExecutor executor = new InProcessJobExecutor(context);
    try {
        executor.execute(job, Collections.<String, String>emptyMap());
    } catch (IOException e) {
        throw new AssertionError(e);
    }
    assertThat(call.get(), is(true));
}

From source file:com.asakusafw.testdriver.inprocess.InProcessJobExecutorTest.java

License:Apache License

/**
 * Test method for executing Hadoop job w/ {@code asakusa-resources.xml}.
 */
@Test
public void executeJob_w_resources() {
    prepareJobflow();
    final AtomicBoolean call = new AtomicBoolean();
    MockHadoopJob.callback(new MockHadoopJob.Callback() {
        @Override
        public int run(String[] args, Configuration conf) {
            call.set(true);
            assertThat(conf.get("com.example.testing"), is("true"));
            return 0;
        }
    });

    JobExecutor executor = new InProcessJobExecutor(context);
    deploy("dummy.xml", new File(framework.getHome(), InProcessJobExecutor.PATH_ASAKUSA_RESOURCES));
    try {
        executor.execute(job(MockHadoopJob.class.getName()), Collections.<String, String>emptyMap());
    } catch (IOException e) {
        throw new AssertionError(e);
    }
    assertThat(call.get(), is(true));
}

From source file:com.asakusafw.thundergate.runtime.cache.mapreduce.Invalidation.java

License:Apache License

/**
 * Initializes invalidation timestamp.
 * @param configuration the target configuration
 * @param tableName the target table name
 */
public static void setupInvalidationTimestamp(Configuration configuration, String tableName) {
    long timestamp = getTimestamp(configuration);
    if (timestamp > 0L && isTarget(configuration, tableName)) {
        LOG.info(MessageFormat.format("enabling ThunderGate cache invalidation: {0} until {1}", tableName,
                configuration.get(KEY_INVALIDATION_TIMESTAMP)));
        configuration.setLong(KEY_INTERNAL_TIMESTAMP, timestamp);
    }
}

From source file:com.asakusafw.thundergate.runtime.cache.mapreduce.Invalidation.java

License:Apache License

private static long getTimestamp(Configuration conf) {
    String until = conf.get(KEY_INVALIDATION_TIMESTAMP);
    if (until == null) {
        LOG.debug(MessageFormat.format("invalidation timstamp is not set: {0}", KEY_INVALIDATION_TIMESTAMP));
        return 0L;
    }//w  ww.j av a 2  s  .c  om
    LOG.debug(MessageFormat.format("invalidation timstamp: {0}={1}", KEY_INVALIDATION_TIMESTAMP, until));
    long timestamp = DateUtil.parseDateTime(until, '-', ' ', ':');
    if (timestamp < 0) {
        throw new IllegalArgumentException(
                MessageFormat.format("invalid timestamp: {0}={1}", KEY_INVALIDATION_TIMESTAMP, until));
    }
    return timestamp;
}

From source file:com.asakusafw.thundergate.runtime.cache.mapreduce.Invalidation.java

License:Apache License

private static boolean isTarget(Configuration conf, String table) {
    String pattern = conf.get(KEY_INVALIDATION_TARGET);
    if (pattern == null) {
        LOG.debug(MessageFormat.format("invalidation target is not set: {0}", KEY_INVALIDATION_TARGET));
        return false;
    }
    try {
        boolean matched = Pattern.compile(pattern).matcher(table).matches();
        LOG.debug(MessageFormat.format("invalidation target matching: {0}=\"{1}\" / \"{2}\" => {3}",
                KEY_INVALIDATION_TARGET, pattern, table, matched));
        return matched;
    } catch (PatternSyntaxException e) {
        throw new IllegalArgumentException(
                MessageFormat.format("invalid table name pattern: {0}={1}", KEY_INVALIDATION_TARGET, pattern));
    }
}

From source file:com.asp.tranlog.ImportTsv.java

License:Apache License

/**
 * Sets up the actual job.
 * 
 * @param conf
 *            The current configuration.
 * @param args
 *            The command line parameters.
 * @return The newly created job.
 * @throws IOException
 *             When setting up the job fails.
 */
public static Job createSubmittableJob(Configuration conf, String[] args)
        throws IOException, ClassNotFoundException {

    // Support non-XML supported characters
    // by re-encoding the passed separator as a Base64 string.
    String actualSeparator = conf.get(SEPARATOR_CONF_KEY);
    if (actualSeparator != null) {
        conf.set(SEPARATOR_CONF_KEY, new String(Base64.encodeBytes(actualSeparator.getBytes())));
    }

    // See if a non-default Mapper was set
    String mapperClassName = conf.get(MAPPER_CONF_KEY);
    Class mapperClass = mapperClassName != null ? Class.forName(mapperClassName) : DEFAULT_MAPPER;

    String tableName = args[0];
    Path inputDir = new Path(args[1]);
    Job job = new Job(conf, NAME + "_" + tableName);
    job.setJarByClass(mapperClass);
    FileInputFormat.setInputPaths(job, inputDir);

    String inputCodec = conf.get(INPUT_LZO_KEY);
    if (inputCodec == null) {
        FileInputFormat.setMaxInputSplitSize(job, 67108864L); // max split size = 64 MB
        job.setInputFormatClass(TextInputFormat.class);
    } else {
        if (inputCodec.equalsIgnoreCase("lzo"))
            job.setInputFormatClass(LzoTextInputFormat.class);
        else {
            usage("not supported compression codec!");
            System.exit(-1);
        }
    }

    job.setMapperClass(mapperClass);

    String hfileOutPath = conf.get(BULK_OUTPUT_CONF_KEY);
    if (hfileOutPath != null) {
        HTable table = new HTable(conf, tableName);
        job.setReducerClass(PutSortReducer.class);
        Path outputDir = new Path(hfileOutPath);
        FileOutputFormat.setOutputPath(job, outputDir);
        job.setMapOutputKeyClass(ImmutableBytesWritable.class);
        job.setMapOutputValueClass(Put.class);
        HFileOutputFormat.configureIncrementalLoad(job, table);
    } else {
        // No reducers. Just write straight to table.
        // Call initTableReducerJob to set up the TableOutputFormat.
        TableMapReduceUtil.initTableReducerJob(tableName, null, job);
        job.setNumReduceTasks(0);
    }

    TableMapReduceUtil.addDependencyJars(job);
    // Guava (com.google.common.base.Function) is used by TsvParser.
    TableMapReduceUtil.addDependencyJars(job.getConfiguration(), com.google.common.base.Function.class);
    return job;
}