Example usage for org.apache.hadoop.conf Configuration set

Introduction

This page lists usage examples of org.apache.hadoop.conf.Configuration#set(String, String).

Prototype

public void set(String name, String value) 

Document

Set the value of the name property.
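
Before the project-specific examples below, here is a minimal, self-contained sketch of the call in isolation (the property keys are only illustrative): a value assigned with set(String, String) overrides values loaded from configuration resources such as *-site.xml (unless the resource marks the property final) and is returned by get(String).

import org.apache.hadoop.conf.Configuration;

public class ConfigurationSetExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // override a built-in property and define a custom one
        conf.set("mapreduce.job.reduces", "4");
        conf.set("example.app.greeting", "hello");

        System.out.println(conf.get("mapreduce.job.reduces")); // 4
        System.out.println(conf.get("example.app.greeting"));  // hello
    }
}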

Usage

From source file:com.asakusafw.testdriver.inprocess.InProcessJobExecutor.java

License:Apache License

@Override
public void execute(TestExecutionPlan.Job job, Map<String, String> environmentVariables) throws IOException {
    assert job != null;
    LOG.info(MessageFormat.format(Messages.getString("InProcessJobExecutor.infoStartHadoop"), //$NON-NLS-1$
            job.getClassName()));
    List<String> arguments = new ArrayList<>();
    arguments.add(job.getClassName());
    arguments.addAll(computeHadoopJobArguments(job));
    ClassLoader original = Thread.currentThread().getContextClassLoader();
    try {
        Configuration conf = configurations.newInstance();
        synchronized (GLOBAL_SETTINGS) {
            for (Map.Entry<String, String> entry : GLOBAL_SETTINGS.getProperties().entrySet()) {
                conf.set(entry.getKey(), entry.getValue());
            }
        }
        for (Map.Entry<String, String> entry : job.getProperties().entrySet()) {
            conf.set(entry.getKey(), entry.getValue());
        }
        try {
            int exitValue = ApplicationLauncher.exec(conf, arguments.toArray(new String[arguments.size()]));
            if (exitValue != 0) {
                throw new AssertionError(MessageFormat.format(
                        Messages.getString("InProcessJobExecutor.errorNonZeroHadoopExitCode"), //$NON-NLS-1$
                        exitValue, context.getCurrentFlowId()));
            }
        } catch (Exception e) {
            throw (AssertionError) new AssertionError(
                    MessageFormat.format(Messages.getString("InProcessJobExecutor.errorUnknownHadoopException"), //$NON-NLS-1$
                            context.getCurrentFlowId())).initCause(e);
        }
    } finally {
        Thread.currentThread().setContextClassLoader(original);
    }
}

From source file:com.asakusafw.testdriver.OperatorTestEnvironment.java

License:Apache License

/**
 * Invoked before running a test case.
 */
protected void before() {
    Configuration conf = createConfig();
    for (Map.Entry<String, String> entry : extraConfigurations.entrySet()) {
        conf.set(entry.getKey(), entry.getValue());
    }
    if (batchArguments.isEmpty() == false) {
        VariableTable variables = new VariableTable(RedefineStrategy.OVERWRITE);
        for (Map.Entry<String, String> entry : batchArguments.entrySet()) {
            variables.defineVariable(entry.getKey(), entry.getValue());
        }
        conf.set(StageConstants.PROP_ASAKUSA_BATCH_ARGS, variables.toSerialString());
    }

    manager = new RuntimeResourceManager(conf);
    try {
        manager.setup();
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:com.asakusafw.testdriver.OperatorTestEnvironment.java

License:Apache License

/**
 * Returns a new configuration object for {@link RuntimeResourceManager}.
 * @return the created configuration object
 */
protected Configuration createConfig() {
    Configuration conf = ConfigurationFactory.getDefault().newInstance();
    URL resource = conf.getClassLoader().getResource(configurationPath);
    if (resource == null && explicitConfigurationPath == false) {
        // if implicit configuration file is not found, we use the embedded default configuration file
        resource = OperatorTestEnvironment.class.getResource(DEFAULT_CONFIGURATION_PATH);
    }
    if (resource == null) {
        throw new IllegalStateException(MessageFormat.format(
                Messages.getString("OperatorTestEnvironment.errorMissingConfigurationFile"), //$NON-NLS-1$
                configurationPath));
    }
    for (Map.Entry<String, String> entry : extraConfigurations.entrySet()) {
        conf.set(entry.getKey(), entry.getValue());
    }
    conf.addResource(resource);
    return conf;
}

From source file:com.asp.tranlog.ImportTsv.java

License:Apache License

/**
 * Sets up the actual job.
 * 
 * @param conf
 *            The current configuration.
 * @param args
 *            The command line parameters.
 * @return The newly created job.
 * @throws IOException
 *             When setting up the job fails.
 */
public static Job createSubmittableJob(Configuration conf, String[] args)
        throws IOException, ClassNotFoundException {

    // Support non-XML supported characters
    // by re-encoding the passed separator as a Base64 string.
    String actualSeparator = conf.get(SEPARATOR_CONF_KEY);
    if (actualSeparator != null) {
        conf.set(SEPARATOR_CONF_KEY, new String(Base64.encodeBytes(actualSeparator.getBytes())));
    }

    // See if a non-default Mapper was set
    String mapperClassName = conf.get(MAPPER_CONF_KEY);
    Class mapperClass = mapperClassName != null ? Class.forName(mapperClassName) : DEFAULT_MAPPER;

    String tableName = args[0];
    Path inputDir = new Path(args[1]);
    Job job = new Job(conf, NAME + "_" + tableName);
    job.setJarByClass(mapperClass);
    FileInputFormat.setInputPaths(job, inputDir);

    String inputCodec = conf.get(INPUT_LZO_KEY);
    if (inputCodec == null) {
        FileInputFormat.setMaxInputSplitSize(job, 67108864L); // max split size = 64 MB
        job.setInputFormatClass(TextInputFormat.class);
    } else {
        if (inputCodec.equalsIgnoreCase("lzo"))
            job.setInputFormatClass(LzoTextInputFormat.class);
        else {
            usage("not supported compression codec!");
            System.exit(-1);
        }
    }

    job.setMapperClass(mapperClass);

    String hfileOutPath = conf.get(BULK_OUTPUT_CONF_KEY);
    if (hfileOutPath != null) {
        HTable table = new HTable(conf, tableName);
        job.setReducerClass(PutSortReducer.class);
        Path outputDir = new Path(hfileOutPath);
        FileOutputFormat.setOutputPath(job, outputDir);
        job.setMapOutputKeyClass(ImmutableBytesWritable.class);
        job.setMapOutputValueClass(Put.class);
        HFileOutputFormat.configureIncrementalLoad(job, table);
    } else {
        // No reducers. Just write straight to table. Call
        // initTableReducerJob
        // to set up the TableOutputFormat.
        TableMapReduceUtil.initTableReducerJob(tableName, null, job);
        job.setNumReduceTasks(0);
    }

    TableMapReduceUtil.addDependencyJars(job);
    TableMapReduceUtil.addDependencyJars(job.getConfiguration(),
            com.google.common.base.Function.class /* Guava used by TsvParser */);
    return job;
}
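
The Base64 re-encoding above only protects the separator while it travels through the job configuration; the mapper has to decode it again before splitting lines. A rough sketch of that round trip, using java.util.Base64 purely for illustration (the example itself relies on an HBase Base64 utility, and SEPARATOR_CONF_KEY is reused from the code above):

// driver side: store the raw separator in a transport-safe form
String separator = "|";
conf.set(SEPARATOR_CONF_KEY,
        java.util.Base64.getEncoder().encodeToString(separator.getBytes()));

// mapper side (e.g. in setup()): read the property back and decode it before use
String decoded = new String(
        java.util.Base64.getDecoder().decode(conf.get(SEPARATOR_CONF_KEY)));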

From source file:com.atlantbh.jmeter.plugins.hadooputilities.jobstatistics.JobLayer.java

License:Apache License

protected JobClient prepareJobClient(String jobTracker) throws IOException {
    Configuration conf = new Configuration();
    conf.set("mapred.job.tracker", jobTracker);

    JobConf jobConf = new JobConf(conf);
    JobClient client = new JobClient(jobConf);

    return client;
}
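
A possible follow-up once the client is prepared (the job tracker address is hypothetical; mapred.job.tracker is the classic MRv1 JobTracker property, so this sketch targets a JobTracker rather than YARN):

JobClient client = prepareJobClient("jobtracker.example.com:8021");
JobStatus[] jobs = client.getAllJobs(); // ask the job tracker for the status of all known jobs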

From source file:com.atlantbh.jmeter.plugins.hbasecomponents.config.HBaseConnectionVariable.java

License:Apache License

public Configuration getConfig() {
    Configuration conf = HBaseConfiguration.create();
    System.out.println("Connecting to  " + zkHost);
    conf.set("hbase.zookeeper.quorum", zkHost);
    conf.set("hbase.client.retries.number", "10");
    return conf;
}
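
A short, hypothetical use of the configuration returned above (the variable, table, and row names are made up; it uses the same pre-1.0 HBase client API that already appears elsewhere on this page):

Configuration conf = hbaseConnectionVariable.getConfig();
HTable table = new HTable(conf, "example_table");
try {
    Result result = table.get(new Get(Bytes.toBytes("row-1")));
    System.out.println(result);
} finally {
    table.close();
}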

From source file:com.avira.couchdoop.ArgsHelper.java

License:Apache License

public static void setPropertyFromCliArg(Configuration hadoopConf, CommandLine cl, Args.ArgDef arg) {
    String argValue = cl.getOptionValue(arg.getShortName());

    if (argValue != null) {
        hadoopConf.set(arg.getPropertyName(), argValue);
    }
}

From source file:com.avira.couchdoop.exp.CouchbaseOutputFormat.java

License:Apache License

public static void initJob(Job job, String urls, String bucket, String password) {
    job.setOutputFormatClass(CouchbaseOutputFormat.class);
    job.setOutputKeyClass(String.class);
    job.setOutputValueClass(CouchbaseAction.class);

    Configuration conf = job.getConfiguration();
    conf.set(CouchbaseArgs.ARG_COUCHBASE_URLS.getPropertyName(), urls);
    conf.set(CouchbaseArgs.ARG_COUCHBASE_BUCKET.getPropertyName(), bucket);
    conf.set(CouchbaseArgs.ARG_COUCHBASE_PASSWORD.getPropertyName(), password);
}

From source file:com.avira.couchdoop.imp.CouchbaseViewInputFormat.java

License:Apache License

public static void initJob(Job job, String urls, String bucket, String password, String designDocumentName,
        String viewName, String viewKeys) {
    job.setInputFormatClass(CouchbaseViewInputFormat.class);

    Configuration conf = job.getConfiguration();
    conf.set(CouchbaseArgs.ARG_COUCHBASE_URLS.getPropertyName(), urls);
    conf.set(CouchbaseArgs.ARG_COUCHBASE_BUCKET.getPropertyName(), bucket);
    conf.set(CouchbaseArgs.ARG_COUCHBASE_PASSWORD.getPropertyName(), password);
    conf.set(ImportViewArgs.ARG_DESIGNDOC_NAME.getPropertyName(), designDocumentName);
    conf.set(ImportViewArgs.ARG_VIEW_NAME.getPropertyName(), viewName);
    conf.set(ImportViewArgs.ARG_VIEW_KEYS.getPropertyName(), viewKeys);
}

From source file:com.awcoleman.StandaloneJava.AvroCombinerByBlock.java

License:Apache License

public AvroCombinerByBlock(String inDirStr, String outDirStr, String handleExisting) throws IOException {

    //handle both an output directory and an output filename (ending with .avro)
    String outputFilename = DEFAULTOUTPUTFILENAME;
    if (outDirStr.endsWith(".avro")) {
        isOutputNameSpecifiedAndAFile = true;
        //String[] outputParts = outDirStr.split(":?\\\\");
        String[] outputParts = outDirStr.split("/");

        outputFilename = outputParts[outputParts.length - 1];

        //remove outputFilename from outDirStr to get new outDirStr which is just directory (and trailing /)
        outDirStr = outDirStr.replaceAll(Pattern.quote(outputFilename), "");
        outDirStr = outDirStr.substring(0, outDirStr.length() - (outDirStr.endsWith("/") ? 1 : 0));
    }

    //Get block size - not needed
    //long hdfsBlockSize = getBlockSize();
    //System.out.println("HDFS FS block size: "+hdfsBlockSize);

    //Get list of input files
    ArrayList<FileStatus> inputFileList = new ArrayList<FileStatus>();

    Configuration conf = new Configuration();
    conf.addResource(new Path("/etc/hadoop/conf/core-site.xml"));
    conf.set("dfs.replication", "1"); //see http://stackoverflow.com/questions/24548699/how-to-append-to-an-hdfs-file-on-an-extremely-small-cluster-3-nodes-or-less

    FileSystem hdfs = null;
    try {
        hdfs = FileSystem.get(conf);
    } catch (java.io.IOException ioe) {
        System.out.println("Error opening HDFS filesystem. Exiting. Error message: " + ioe.getMessage());
        System.exit(1);
    }
    if (hdfs.getStatus() == null) {
        System.out.println("Unable to contact HDFS filesystem. Exiting.");
        System.exit(1);
    }

    //Check if input and output dirs exist
    Path inDir = new Path(inDirStr);
    Path outDir = new Path(outDirStr);
    // the input path must exist and be a directory
    if (!(hdfs.exists(inDir) && hdfs.isDirectory(inDir))) {
        System.out.println("Input directory ( " + inDirStr + " ) not found or is not directory. Exiting.");
        System.exit(1);
    }

    // create the output directory if it is missing; refuse to clobber a non-directory
    if (!(hdfs.exists(outDir) && hdfs.isDirectory(outDir))) {
        if (hdfs.exists(outDir)) { //outDir exists and is a symlink or file, must die
            System.out.println("Requested output directory name ( " + outDirStr
                    + " ) exists but is not a directory. Exiting.");
            System.exit(1);
        } else {
            hdfs.mkdirs(outDir);
        }
    }

    RemoteIterator<LocatedFileStatus> fileStatusListIterator = hdfs.listFiles(inDir, true);
    while (fileStatusListIterator.hasNext()) {
        LocatedFileStatus fileStatus = fileStatusListIterator.next();

        if (fileStatus.isFile() && !fileStatus.getPath().getName().equals("_SUCCESS")) {
            inputFileList.add((FileStatus) fileStatus);
        }
    }

    if (inputFileList.size() <= 1 && !isOutputNameSpecifiedAndAFile) { //If an output file is specified assume we just want a rename.
        System.out.println("Only one or zero files found in input directory ( " + inDirStr + " ). Exiting.");
        System.exit(1);
    }

    //Get Schema and Compression Codec from seed file since we need it for the writer
    Path firstFile = inputFileList.get(0).getPath();
    FsInput fsin = new FsInput(firstFile, conf);
    DataFileReader<Object> dfrFirstFile = new DataFileReader<Object>(fsin, new GenericDatumReader<Object>());
    Schema fileSchema = dfrFirstFile.getSchema();
    String compCodecName = dfrFirstFile.getMetaString("avro.codec");
    //compCodecName should be null, deflate, snappy, or bzip2
    if (compCodecName == null) {
        compCodecName = "deflate"; //set to deflate even though original is no compression
    }
    dfrFirstFile.close();

    //Create Empty HDFS file in output dir
    String seedFileStr = outDirStr + "/" + outputFilename;
    Path seedFile = new Path(seedFileStr);
    FSDataOutputStream hdfsdos = null;
    try {
        hdfsdos = hdfs.create(seedFile, false);
    } catch (org.apache.hadoop.fs.FileAlreadyExistsException faee) {
        if (handleExisting.equals("overwrite")) {
            hdfs.delete(seedFile, false);
            hdfsdos = hdfs.create(seedFile, false);
        } else if (handleExisting.equals("append")) {
            hdfsdos = hdfs.append(seedFile);
        } else {
            System.out
                    .println("File " + seedFileStr + " exists and will not overwrite. handleExisting is set to "
                            + handleExisting + ". Exiting.");
            System.exit(1);
        }
    }
    if (hdfsdos == null) {
        System.out.println("Unable to create or write to output file ( " + seedFileStr
                + " ). handleExisting is set to " + handleExisting + ". Exiting.");
        System.exit(1);
    }

    //Append other files
    GenericDatumWriter<Object> gdw = new GenericDatumWriter<>(fileSchema);
    DataFileWriter<Object> dfwBase = new DataFileWriter<>(gdw);
    //Set compression to that found in the first file
    dfwBase.setCodec(CodecFactory.fromString(compCodecName));

    DataFileWriter<Object> dfw = dfwBase.create(fileSchema, hdfsdos);
    for (FileStatus thisFileStatus : inputFileList) {

        //_SUCCESS files are 0 bytes
        if (thisFileStatus.getLen() == 0) {
            continue;
        }

        FsInput fsin1 = new FsInput(thisFileStatus.getPath(), conf);
        DataFileReader<Object> dfr = new DataFileReader<>(fsin1, new GenericDatumReader<Object>());

        dfw.appendAllFrom(dfr, false);

        dfr.close();
    }

    dfw.close();
    dfwBase.close();

}
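
A hypothetical invocation of the constructor above (the paths are made up; per the code, an outDirStr ending in ".avro" names the combined output file, and handleExisting accepts "overwrite" or "append"):

new AvroCombinerByBlock("/data/avro/parts", "/data/avro/combined.avro", "overwrite");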