Example usage for org.apache.hadoop.conf Configuration writeXml

Introduction

This page shows example usage of org.apache.hadoop.conf.Configuration#writeXml, collected from open-source projects.

Prototype

public void writeXml(Writer out) throws IOException 

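Before the collected examples, a minimal sketch of the Writer overload shown in the prototype (the property name is hypothetical):

import java.io.IOException;
import java.io.StringWriter;

import org.apache.hadoop.conf.Configuration;

public class WriteXmlSketch {
    public static void main(String[] args) throws IOException {
        // passing false skips loading the default resources, so only
        // explicitly set properties appear in the generated XML
        Configuration conf = new Configuration(false);
        conf.set("example.key", "example.value"); // hypothetical property

        StringWriter writer = new StringWriter();
        conf.writeXml(writer); // the Writer overload from the prototype above
        System.out.println(writer.toString());
    }
}
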
Usage

From source file:org.kitesdk.data.oozie.TestKiteConfigurationService.java

License:Apache License

private void setupKiteConfigurationService(Path kiteConfigLocation, boolean loadKiteService,
        boolean loadPrimaryConfig, boolean loadSecondaryConfig) throws IOException, ServiceException {
    File confDir = new File(serviceTempDir, "conf");
    File hadoopConfDir = new File(confDir, "hadoop-conf");
    File actionConfDir = new File(confDir, "action-conf");
    confDir.mkdirs();
    hadoopConfDir.mkdirs();
    actionConfDir.mkdirs();

    Path kiteConfDir = new Path(kiteConfigLocation, "kite-conf");
    fs.mkdirs(kiteConfDir);

    // these may need to be forced to local FS
    File oozieSiteTarget = new File(confDir, "oozie-site.xml");
    File hadoopConfTarget = new File(hadoopConfDir, "hadoop-site.xml");

    // we'll want tests for this with both local and hdfs files
    Path primaryConfTarget = new Path(kiteConfDir, "primary-site.xml");
    Path secondaryConfTarget = new Path(kiteConfDir, "secondary-site.xml");

    Configuration oozieSiteConf = new Configuration(false);

    List<String> configs = new ArrayList<String>();
    if (loadPrimaryConfig) {
        configs.add(primaryConfTarget.toString());
    }

    if (loadSecondaryConfig) {
        configs.add(secondaryConfTarget.toString());
    }

    if (!configs.isEmpty()) {
        oozieSiteConf.set("oozie.service.KiteConfigurationService.kite.configuration",
                Joiner.on(",").join(configs));
    }

    oozieSiteConf.set("oozie.services", "org.apache.oozie.service.HadoopAccessorService");
    FileOutputStream oozieStream = new FileOutputStream(oozieSiteTarget);
    oozieSiteConf.writeXml(oozieStream);
    oozieStream.close();
    FileOutputStream hadoopStream = new FileOutputStream(hadoopConfTarget);
    conf.writeXml(hadoopStream);
    hadoopStream.close();

    if (loadPrimaryConfig) {
        Configuration primaryConf = new Configuration(false);
        primaryConf.set("test.property", "test.value");

        FSDataOutputStream primaryStream = fs.create(primaryConfTarget);
        primaryConf.writeXml(primaryStream);
        primaryStream.close();
    }

    if (loadSecondaryConfig) {
        Configuration secondaryConf = new Configuration(false);
        secondaryConf.set("second.property", "second.value");

        FSDataOutputStream secondaryStream = fs.create(secondaryConfTarget);
        secondaryConf.writeXml(secondaryStream);
        secondaryStream.close();
    }

    // set to the temp directory
    System.setProperty("oozie.home.dir", serviceTempDir.getAbsolutePath());

    Services services = new Services();
    services.init();
    if (loadKiteService) {
        services.setService(KiteConfigurationService.class);
    }
}

From source file:org.lab41.mapreduce.IdUsingBulkLoaderDriver.java

License:Apache License

public int configureAndRunJobs(Configuration conf) throws IOException, ClassNotFoundException,
        InterruptedException, StorageException, InstantiationException, IllegalAccessException {

    logger.info("IdUsingBulkLoaderDriver");

    Configuration baseConfiguration = getConf();

    StringWriter stringWriter = new StringWriter();
    baseConfiguration.writeXml(stringWriter);
    stringWriter.toString();

    getAdditionalProperties(baseConfiguration, propsPath);
    getAdditionalProperties(baseConfiguration, sysPath);

    String hbaseSiteXmlPath = hbaseSiteXml;
    InputStream hbaseSiteXmlIS = getInputStreamForPath(hbaseSiteXmlPath, baseConfiguration);

    configureHbase(baseConfiguration, hbaseSiteXmlIS);

    //Configure the first and second jobs
    FaunusGraph faunusGraph = new FaunusGraph(baseConfiguration);

    String job1Outputpath = faunusGraph.getOutputLocation().toString();
    Path intermediatePath = new Path(job1Outputpath + "/job1");

    FileSystem fs = FileSystem.get(baseConfiguration);
    Job job1 = configureJob1(faunusGraph, intermediatePath, baseConfiguration, fs);
    Job job2 = configureJob2(baseConfiguration, faunusGraph, fs);

    //no longer need the faunus graph.
    faunusGraph.shutdown();
    if (job1.waitForCompletion(true)) {
        logger.info("SUCCESS 1: Cleaning up HBASE ");
        HBaseAdmin hBaseAdmin = new HBaseAdmin(baseConfiguration);
        hBaseAdmin.majorCompact(baseConfiguration.get("faunus.graph.output.titan.storage.tablename"));

        boolean betweenSplit = conf.getBoolean(BETWEEN_SPLIT_KEY, BETWEEN_SPLIT_DEFUALT);
        if (betweenSplit) {
            hBaseAdmin.split(baseConfiguration.get("faunus.graph.output.titan.storage.tablename"));
        }
        hBaseAdmin.balancer();

        logger.info("HBASE Clean up complete- starting next job");

        if (job2.waitForCompletion(true)) {
            logger.info("SUCCESS 2");
        }

    }
    return 1;
}

From source file:org.lilyproject.hadooptestfw.HBaseProxy.java

License:Apache License

/**
 * Dumps the hadoop and hbase configuration. Useful as a reference if other applications want to use the
 * same configuration to connect with the hadoop cluster.
 *
 * @param testHome directory in which to dump the configuration (it will create a conf subdir inside)
 * @param conf     the configuration
 */
private void writeConfiguration(File testHome, Configuration conf) throws IOException {
    final File confDir = new File(testHome, "conf");
    final boolean confDirCreated = confDir.mkdir();
    if (!confDirCreated) {
        throw new IOException("failed to create " + confDir);
    }

    // dump everything into multiple xxx-site.xml files so that the expected files are definitely there
    for (String filename : Arrays.asList("core-site.xml", "mapred-site.xml")) {
        final BufferedOutputStream out = new BufferedOutputStream(
                new FileOutputStream(new File(confDir, filename)));
        try {
            conf.writeXml(out);
        } finally {
            out.close();
        }
    }
}

From source file:org.openflamingo.engine.configuration.ConfigurationUtils.java

License:Apache License

/**
 * Serializes a {@link org.apache.hadoop.conf.Configuration} to XML.
 *
 * @param conf the {@link org.apache.hadoop.conf.Configuration} to serialize
 * @return the XML representation of the configuration
 */
public static String configurationToXml(Configuration conf) {
    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        conf.writeXml(baos);
        return new String(baos.toByteArray());
    } catch (Exception e) {
        throw new WorkflowException("Hadoop Configuration? XML   .", e);
    }
}
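
Reading such XML back is the natural complement of this helper. A minimal round-trip sketch, with hypothetical property and file names, assuming a local filesystem path:

import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class RoundTripSketch {
    public static void main(String[] args) throws IOException {
        Configuration original = new Configuration(false);
        original.set("round.trip.key", "round.trip.value"); // hypothetical property

        // write the configuration out as XML
        java.nio.file.Path xmlFile = Paths.get("round-trip-site.xml"); // hypothetical file name
        try (OutputStream out = Files.newOutputStream(xmlFile)) {
            original.writeXml(out);
        }

        // read it back; addResource accepts a Hadoop Path
        Configuration restored = new Configuration(false);
        restored.addResource(new Path(xmlFile.toAbsolutePath().toString()));
        System.out.println(restored.get("round.trip.key")); // prints round.trip.value
    }
}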

From source file:org.pentaho.hbase.shim.common.CommonHBaseConnectionTest.java

License:Apache License

private static void addToCustomHbaseConfigFile() throws Exception {
    Configuration configuration = new Configuration();
    configuration.addResource("hbase-default.xml");
    if (configuration.get(HBaseConnection.HBASE_VERSION_KEY) != null) {
        Configuration conf = new Configuration(false);
        //if the HBase version is not set to one valid for the shim, an exception is thrown, so
        //in the new file we set the version property read from the default file shipped with the Hadoop libraries for the provider
        conf.set(HBaseConnection.HBASE_VERSION_KEY, configuration.get(HBaseConnection.HBASE_VERSION_KEY));
        conf.set(HBaseConnection.ZOOKEEPER_QUORUM_KEY, ZOOKEEPER_QUORUM_CONFIG_TEST_FILE);
        confFile = new File(System.getProperty("user.dir") + File.separator + "hbase-default-1.xml");
        FileOutputStream out = new FileOutputStream(confFile);
        conf.writeXml(new DataOutputStream(out));
        out.close();
    }
}

From source file:org.springframework.data.hadoop.impala.mapreduce.MapReduceCommands.java

License:Apache License

/**
 * Writes the Hadoop configuration to a directory as
 * "core-site.xml", "hdfs-site.xml" and "mapred-site.xml".
 *
 * @param configDir the directory to which the files are written
 * @param config Hadoop configuration
 * 
 */
public void writeHadoopConfiguration(File configDir, Configuration config) {
    configDir.mkdirs();
    try {
        FileOutputStream fos = new FileOutputStream(new File(configDir + Path.SEPARATOR + "core-site.xml"));
        config.writeXml(fos);
        fos.close();
        fos = new FileOutputStream(new File(configDir + Path.SEPARATOR + "hdfs-site.xml"));
        config.writeXml(fos);
        fos.close();
        fos = new FileOutputStream(new File(configDir + Path.SEPARATOR + "mapred-site.xml"));
        config.writeXml(fos);
        fos.close();
    } catch (Exception e) {
        LOG.severe("Failed to save the user's configuration. Message: " + e.getMessage());
    }

}

From source file:org.springframework.data.hadoop.impala.mapreduce.MapReduceCommandsTest.java

License:Apache License

/**
 * Test method for {@link org.springframework.data.hadoop.impala.mapreduce.MapReduceCommands#submit(java.lang.String)}.
 * @throws Exception
 */
@Test
public void testSubmit() throws Exception {
    Configuration jobConfig = new Configuration(false);

    Configuration hadoopConfig = mrCmds.getHadoopConfiguration();

    FsShell shell = new FsShell(hadoopConfig);
    List<String> argv = new ArrayList<String>();
    argv.add("-rmr");
    argv.add("/tmp/wc-input");
    shell.run(argv.toArray(new String[0]));

    argv = new ArrayList<String>();
    argv.add("-put");
    File f = new File("src/test/resources/wordcount-conf.xml");
    argv.add(f.getAbsolutePath());
    argv.add("/tmp/wc-input/");
    shell.run(argv.toArray(new String[0]));

    argv = new ArrayList<String>();
    argv.add("-rmr");
    argv.add("/tmp/wc-output");
    shell.run(argv.toArray(new String[0]));

    String hadoopFsName = hadoopConfig.get("fs.default.name");
    String hadoopJT = hadoopConfig.get("mapred.job.tracker");
    File jarFile = new File(hadoopExampleJarFile);

    jobConfig.set("fs.default.name", hadoopFsName);
    jobConfig.set("mapred.job.tracker", hadoopJT);
    jobConfig.set("mapred.jar", jarFile.getAbsolutePath());
    jobConfig.set("mapred.input.dir", "/tmp/wc-input");
    jobConfig.set("mapred.output.dir", "/tmp/wc-output");
    jobConfig.set("mapreduce.map.class", "org.apache.hadoop.examples.WordCount.TokenizerMapper");
    jobConfig.set("mapreduce.reduce.class", "org.apache.hadoop.examples.WordCount.IntSumReducer");

    String tmpFile = "/tmp/impala-test-wordcount-conf.xml";
    try {
        jobConfig.writeXml(new FileOutputStream(new File(tmpFile)));
    } catch (Exception e) {
        Assert.fail("fail to write temp MR configuration file");
    }

    mrCmds.submit(tmpFile);
}

From source file:org.springframework.data.hadoop.mapreduce.JarExecutor.java

License:Apache License

@Override
protected void preExecution(Configuration cfg) {
    // generate a name
    configName = "Custom-cfg-for- " + jar + "-" + UUID.randomUUID();
    try {
        savedConfiguration = File.createTempFile("SHDP-jar-cfg-", null);
        cfg.writeXml(new FileOutputStream(savedConfiguration));
        // don't use addDefaultResource because it has side-effects
        //Configuration.addDefaultResource(configName);
        defaultResources().add(configName);
    } catch (IOException ex) {
        throw new IllegalArgumentException("Cannot set custom configuration", ex);
    }
}

From source file:org.talend.components.hadoopcluster.runtime.configuration.HadoopAmbariClusterService.java

License:Open Source License

@Override
public String getConfFileContent(String confFileName) {
    Configuration conf = confs.get(confFileName);
    if (conf == null) {
        return null;
    }
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        conf.writeXml(baos);
    } catch (IOException e) {
        throw new ComponentException(e);
    }
    try {
        return baos.toString("UTF-8");
    } catch (UnsupportedEncodingException e) {
        throw new ComponentException(e);
    }
}
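
Note how this example decodes the buffer with an explicit "UTF-8" charset rather than the platform default. A sketch of the same idea using java.nio.charset.StandardCharsets, which avoids the checked UnsupportedEncodingException (assuming, as this example does, that writeXml produces UTF-8 output):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;

public class ConfToStringSketch {
    public static String toXmlString(Configuration conf) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        conf.writeXml(baos);
        // new String(byte[], Charset) throws no checked exception, unlike baos.toString("UTF-8")
        return new String(baos.toByteArray(), StandardCharsets.UTF_8);
    }
}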

From source file:org.talend.components.hadoopcluster.runtime.configuration.HadoopAmbariClusterService.java

License:Open Source License

private void exportConfigurationToXml(String folderPath, String confName) {
    Configuration conf = confs.get(confName);
    if (conf == null) {
        return;
    }
    File confFile = new File(folderPath, confName + ".xml"); //$NON-NLS-1$
    confFile.getParentFile().mkdirs();
    OutputStream os;
    try {
        os = new FileOutputStream(confFile.getAbsolutePath());
        conf.writeXml(os);
    } catch (FileNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}