Example usage for org.apache.hadoop.conf Configuration writeXml

List of usage examples for org.apache.hadoop.conf Configuration writeXml

Introduction

On this page you can find example usage of org.apache.hadoop.conf Configuration writeXml.

Prototype

public void writeXml(Writer out) throws IOException 

Source Link

Usage

From source file:com.cloudera.hoop.client.fs.TestHoopFileSystem.java

License:Open Source License

/**
 * Creates and starts an in-process Hoop server for this test.
 * <p>
 * Lays out the home directory ({@code conf}, {@code log}, {@code temp}),
 * writes a {@code hoop-site.xml} pointing at the test Hadoop cluster and
 * Jetty URL, locates the hoop-webapp source directory, and deploys it on
 * the test Jetty server.
 *
 * @throws Exception if directory setup, config writing, or server start fails.
 */
private void createHoopServer() throws Exception {
    // Home directory layout the Hoop server expects: conf/, log/, temp/.
    File homeDir = getTestDir();
    Assert.assertTrue(new File(homeDir, "conf").mkdir());
    Assert.assertTrue(new File(homeDir, "log").mkdir());
    Assert.assertTrue(new File(homeDir, "temp").mkdir());
    HoopServer.setHomeDirForCurrentThread(homeDir.getAbsolutePath());

    // Point Hoop at the test cluster's default FS and the test proxy user.
    String fsDefaultName = getHadoopConf().get("fs.default.name");
    Configuration conf = new Configuration(false);
    conf.set("hoop.hadoop.conf:fs.default.name", fsDefaultName);
    conf.set("hoop.base.url", getJettyURL().toExternalForm());
    conf.set("hoop.proxyuser." + getHadoopProxyUser() + ".groups", getHadoopProxyUserGroups());
    conf.set("hoop.proxyuser." + getHadoopProxyUser() + ".hosts", getHadoopProxyUserHosts());
    File hoopSite = new File(new File(homeDir, "conf"), "hoop-site.xml");
    OutputStream os = new FileOutputStream(hoopSite);
    try {
        conf.writeXml(os);
    } finally {
        // Close even if writeXml throws; original leaked the stream on error.
        os.close();
    }

    // Walk up from the working dir to find the hoop-webapp module source.
    File currentDir = new File("foo").getAbsoluteFile().getParentFile();
    if (currentDir.getName().equals("target")) {
        currentDir = currentDir.getParentFile();
    }
    if (currentDir.getName().equals("hoop-client")) {
        currentDir = currentDir.getParentFile();
    }
    File hoopDir = new File(currentDir, "hoop-webapp");
    Assert.assertTrue(hoopDir.exists(),
            "Could not locate hoop-webapp source dir: " + hoopDir.getAbsolutePath());
    String hoopWebAppDir = new File(new File(new File(hoopDir, "src"), "main"), "webapp").getAbsolutePath();
    WebAppContext context = new WebAppContext(hoopWebAppDir, "/");

    Server server = getJettyServer();
    server.addHandler(context);
    server.start();
}

From source file:com.cloudera.kitten.util.LocalDataHelper.java

License:Open Source License

/**
 * Serializes the given configuration to a temp XML file and copies it to HDFS.
 *
 * @param key  key under which the file is registered via {@code copyToHdfs}.
 * @param conf configuration to serialize.
 * @throws IOException if the temp file cannot be written or the HDFS copy fails.
 */
public void copyConfiguration(String key, Configuration conf) throws IOException {
    File tmpFile = File.createTempFile("job", ".xml");
    tmpFile.deleteOnExit();
    FileOutputStream os = new FileOutputStream(tmpFile);
    try {
        conf.writeXml(os);
    } finally {
        // Close even if writeXml throws; original leaked the stream on error.
        os.close();
    }
    copyToHdfs(key, tmpFile.getAbsolutePath());
}

From source file:com.cloudera.llama.server.TestAbstractMain.java

License:Apache License

/**
 * Writes a llama-site.xml into {@code confDir} for the server under test and
 * registers the directory via the {@code AbstractMain} system property.
 * <p>
 * Binds server and HTTP addresses to ephemeral ports ({@code localhost:0}) and
 * defaults the RM connector to the mock implementation.
 *
 * @param confDir directory where llama-site.xml is written.
 * @param conf    configuration to populate and serialize.
 * @throws Exception if the file cannot be written.
 */
private void createMainConf(String confDir, Configuration conf) throws Exception {
    System.setProperty(AbstractMain.CONF_DIR_SYS_PROP, confDir);
    conf.setIfUnset(LlamaAM.RM_CONNECTOR_CLASS_KEY, MockRMConnector.class.getName());
    conf.set(sConf.getPropertyName(ServerConfiguration.SERVER_ADDRESS_KEY), "localhost:0");
    conf.set(sConf.getPropertyName(ServerConfiguration.HTTP_ADDRESS_KEY), "localhost:0");
    Writer writer = new FileWriter(new File(confDir, "llama-site.xml"));
    try {
        conf.writeXml(writer);
    } finally {
        // Close even if writeXml throws; original leaked the writer on error.
        writer.close();
    }
}

From source file:com.github.sakserv.minicluster.oozie.util.OozieConfigHelpers.java

License:Apache License

/**
 * Serializes the given configuration as XML to {@code outputLocation},
 * creating parent directories as needed.
 *
 * @param configuration  configuration to serialize.
 * @param outputLocation path of the XML file to write.
 * @throws IOException if the file cannot be written.
 */
public static void writeXml(Configuration configuration, String outputLocation) throws IOException {
    new File(new File(outputLocation).getParent()).mkdirs();
    FileOutputStream os = new FileOutputStream(outputLocation);
    try {
        configuration.writeXml(os);
    } finally {
        // Original never closed this stream, leaking a file handle per call.
        os.close();
    }
}

From source file:com.github.sakserv.minicluster.oozie.util.OozieConfigUtil.java

License:Apache License

/**
 * Serializes the given configuration as XML to {@code outputLocation},
 * creating parent directories as needed.
 *
 * @param configuration  configuration to serialize.
 * @param outputLocation path of the XML file to write.
 * @throws IOException if the file cannot be written.
 */
public void writeXml(Configuration configuration, String outputLocation) throws IOException {
    new File(new File(outputLocation).getParent()).mkdirs();
    FileOutputStream os = new FileOutputStream(outputLocation);
    try {
        configuration.writeXml(os);
    } finally {
        // Original never closed this stream, leaking a file handle per call.
        os.close();
    }
}

From source file:com.google.appengine.tools.mapreduce.ConfigurationXmlUtil.java

License:Apache License

/**
 * Serializes a configuration to its corresponding XML document as a string.
 *
 * @param conf the configuration to encode
 * @return the configuration as an XML document
 */
public static String convertConfigurationToXml(Configuration conf) {
    // ByteArrayOutputStream needs no close(); it holds no OS resources.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        conf.writeXml(baos);
        // Use the canonical charset name; "UTF8" is a legacy alias.
        return baos.toString("UTF-8");
    } catch (IOException e) {
        // Cannot happen for an in-memory stream; surface as unchecked.
        throw new RuntimeException(
                "Got an IOException writing to ByteArrayOutputStream. This should never happen.", e);
    }
}

From source file:com.ibm.jaql.MiniCluster.java

License:Apache License

/**
 * Copies job tracker settings from the MiniMR {@code jconf} into {@code conf}
 * and writes {@code conf} as core/mapred/hdfs default XML files into the
 * directory named by the {@code hadoop.conf.override} system property.
 *
 * @param jconf JobConf produced by MiniMR (source of tracker addresses).
 * @param conf  configuration to update and write out.
 * @throws IOException if the override dir is missing or a file cannot be written.
 */
protected void setupOverride(Configuration jconf, Configuration conf) throws IOException {
    // require that override dir exists
    File overrideDir = new File(System.getProperty("hadoop.conf.override"));
    if (!overrideDir.exists()) {
        throw new IOException("hadoop-override dir must exist");
    }

    // Dump the JobConf from MiniMR to stdout for debugging.
    jconf.writeXml(System.out);
    System.out.println();
    System.out.flush();
    conf.set("mapred.job.tracker", jconf.get("mapred.job.tracker", null));
    // Older Hadoop exposes the tracker UI port under *.info.port; fall back
    // to *.http.address when it is absent.
    String name = "mapred.job.tracker.info.port";
    String addr = jconf.get(name, null);
    if (addr == null) {
        name = "mapred.job.tracker.http.address";
        addr = jconf.get(name, null);
    }
    // NOTE(review): addr may still be null if neither property is set — confirm
    // jconf always carries one of them in this MiniMR setup.
    conf.set(name, addr);

    String path = overrideDir.getCanonicalPath() + File.separator;
    System.out.println("writing conf to: " + path);
    writeConfXml(conf, path + "core-default.xml");
    writeConfXml(conf, path + "mapred-default.xml");
    writeConfXml(conf, path + "hdfs-default.xml");
    System.out.println("setup complete");
    System.out.flush();
}

/**
 * Writes {@code conf} as XML to {@code file}, always closing the stream
 * (the original code leaked all three streams if any write threw).
 */
private static void writeConfXml(Configuration conf, String file) throws IOException {
    OutputStream out = new FileOutputStream(file);
    try {
        conf.writeXml(out);
    } finally {
        out.close();
    }
}

From source file:com.inmobi.conduit.distcp.tools.mapred.CopyMapper.java

License:Apache License

/**
 * Initialize SSL Config if same is set in conf
 *
 * @throws IOException - If any/*from  ww w .  ja  v  a  2s .  c o  m*/
 */
private void initializeSSLConf() throws IOException {
    LOG.info("Initializing SSL configuration");

    String workDir = conf.get("mapred.local.dir") + "/work";
    Path[] cacheFiles = DistributedCache.getLocalCacheFiles(conf);

    Configuration sslConfig = new Configuration(false);
    String sslConfFileName = conf.get(DistCpConstants.CONF_LABEL_SSL_CONF);
    Path sslClient = findCacheFile(cacheFiles, sslConfFileName);
    if (sslClient == null) {
        LOG.warn("SSL Client config file not found. Was looking for " + sslConfFileName + " in "
                + Arrays.toString(cacheFiles));
        return;
    }
    sslConfig.addResource(sslClient);

    String trustStoreFile = conf.get("ssl.client.truststore.location");
    Path trustStorePath = findCacheFile(cacheFiles, trustStoreFile);
    sslConfig.set("ssl.client.truststore.location", trustStorePath.toString());

    String keyStoreFile = conf.get("ssl.client.keystore.location");
    Path keyStorePath = findCacheFile(cacheFiles, keyStoreFile);
    sslConfig.set("ssl.client.keystore.location", keyStorePath.toString());

    try {
        OutputStream out = new FileOutputStream(workDir + "/" + sslConfFileName);
        try {
            sslConfig.writeXml(out);
        } finally {
            out.close();
        }
        conf.set(DistCpConstants.CONF_LABEL_SSL_KEYSTORE, sslConfFileName);
    } catch (IOException e) {
        LOG.warn("Unable to write out the ssl configuration. "
                + "Will fall back to default ssl-client.xml in class path, if there is one", e);
    }
}

From source file:com.intropro.prairie.unit.oozie.OozieUnit.java

License:Apache License

/**
 * Writes the YARN and HDFS unit configurations as yarn-site.xml and
 * hdfs-site.xml under a fresh hadoop-conf directory in the tmp dir,
 * so Oozie can pick them up.
 *
 * @throws IOException if either file cannot be written.
 */
private void writeHadoopConfigs() throws IOException {
    hadoopConfDir = new File(getTmpDir().toFile(), "hadoop-conf");
    new File(hadoopConfDir, "default").mkdirs();
    Configuration yarnConfigs = yarnUnit.getConfig();
    yarnConfigs.set("mapreduce.framework.name", "yarn");
    FileOutputStream fileOutputStream = new FileOutputStream(new File(hadoopConfDir, "yarn-site.xml"));
    try {
        yarnConfigs.writeXml(fileOutputStream);
    } finally {
        // Close even if writeXml throws; original leaked the stream on error.
        fileOutputStream.close();
    }
    fileOutputStream = new FileOutputStream(new File(hadoopConfDir, "hdfs-site.xml"));
    try {
        hdfsUnit.getConfig().writeXml(fileOutputStream);
    } finally {
        fileOutputStream.close();
    }
}

From source file:com.moz.fiji.mapreduce.framework.JobHistoryFijiTable.java

License:Apache License

/**
 * Writes details of a job into the JobHistoryFijiTable.
 *
 * <p>All cells for the job are written atomically at timestamp {@code startTime}
 * under the job's entity id.</p>
 *
 * @param jobId unique identifier for the job.
 * @param jobName name of the job.
 * @param startTime time in milliseconds since the epoch at which the job started.
 * @param endTime time in milliseconds since the epoch at which the job ended.
 * @param jobSuccess whether the job completed successfully.
 * @param conf Configuration of the job; may be null, in which case a sentinel value is stored.
 * @param counters map of counters from the job. Keys should be of the form 'group:name'.
 * @param extendedInfo any additional information which should be stored about the job.
 * @throws IOException in case of an error writing to the table.
 */
// CSOFF: ParameterNumberCheck
public void recordJob(final String jobId, final String jobName, final long startTime, final long endTime,
        final boolean jobSuccess, final Configuration conf, final Map<String, Long> counters,
        final Map<String, String> extendedInfo) throws IOException {
    // CSON: ParameterNumberCheck
    final EntityId eid = mFijiTable.getEntityId(jobId);
    // Atomic putter: all cells commit together or not at all.
    final AtomicFijiPutter putter = mFijiTable.getWriterFactory().openAtomicPutter();
    try {
        putter.begin(eid);
        putter.put(JOB_HISTORY_FAMILY, JOB_HISTORY_ID_QUALIFIER, startTime, jobId);
        putter.put(JOB_HISTORY_FAMILY, JOB_HISTORY_NAME_QUALIFIER, startTime, jobName);
        putter.put(JOB_HISTORY_FAMILY, JOB_HISTORY_START_TIME_QUALIFIER, startTime, startTime);
        putter.put(JOB_HISTORY_FAMILY, JOB_HISTORY_END_TIME_QUALIFIER, startTime, endTime);
        putter.put(JOB_HISTORY_FAMILY, JOB_HISTORY_END_STATUS_QUALIFIER, startTime,
                (jobSuccess) ? SUCCEEDED : FAILED);
        // Whole counters map stored as one string, in addition to the
        // per-counter cells written by writeCounters below.
        putter.put(JOB_HISTORY_FAMILY, JOB_HISTORY_COUNTERS_QUALIFIER, startTime, counters.toString());
        if (null != conf) {
            // Store the job configuration as its XML document.
            final ByteArrayOutputStream baos = new ByteArrayOutputStream();
            conf.writeXml(baos);
            putter.put(JOB_HISTORY_FAMILY, JOB_HISTORY_CONFIGURATION_QUALIFIER, startTime,
                    baos.toString("UTF-8"));
        } else {
            // Sentinel marking that no configuration was recorded.
            putter.put(JOB_HISTORY_FAMILY, JOB_HISTORY_CONFIGURATION_QUALIFIER, startTime,
                    JOB_HISTORY_NO_CONFIGURATION_VALUE);
        }
        writeCounters(putter, startTime, counters);
        writeExtendedInfo(putter, startTime, extendedInfo);
        putter.commit();
    } finally {
        // Release the putter even if any put or commit throws.
        putter.close();
    }
}