Example usage for org.apache.hadoop.conf Configuration writeXml

Introduction

On this page you can find example usages of org.apache.hadoop.conf.Configuration.writeXml.

Prototype

public void writeXml(Writer out) throws IOException 
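
Configuration also provides an OutputStream overload of writeXml, which most of the examples below use. A minimal, self-contained sketch of the Writer overload (the class name and property key here are illustrative, not taken from the examples):

import java.io.IOException;
import java.io.StringWriter;

import org.apache.hadoop.conf.Configuration;

public class WriteXmlSketch {
    public static void main(String[] args) throws IOException {
        // Build a Configuration without loading the default resources
        // (core-default.xml, core-site.xml), so only explicit settings are written.
        Configuration conf = new Configuration(false);
        conf.set("example.key", "example-value"); // illustrative property

        // Serialize the configuration as <configuration><property>... XML.
        StringWriter writer = new StringWriter();
        conf.writeXml(writer);
        System.out.println(writer.toString());
    }
}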

Usage

From source file: org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemAbstractSelfTest.java

License: Apache License

/** {@inheritDoc} */
@Override
protected void beforeTestsStarted() throws Exception {
    Configuration secondaryConf = configurationSecondary(SECONDARY_AUTHORITY);

    secondaryConf.setInt("fs.igfs.block.size", 1024);

    String path = U.getIgniteHome() + SECONDARY_CFG_PATH;

    File file = new File(path);

    try (FileOutputStream fos = new FileOutputStream(file)) {
        secondaryConf.writeXml(fos);
    }

    startNodes();
}

From source file: org.apache.metron.integration.components.YarnComponent.java

License: Apache License

@Override
public void start() throws UnableToStartException {
    conf = new YarnConfiguration();
    conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 128);
    conf.set("yarn.log.dir", "target");
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
    conf.set(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class.getName());
    conf.setBoolean(YarnConfiguration.NODE_LABELS_ENABLED, true);

    try {
        yarnCluster = new MiniYARNCluster(testName, 1, NUM_NMS, 1, 1, true);
        yarnCluster.init(conf);

        yarnCluster.start();

        waitForNMsToRegister();

        URL url = Thread.currentThread().getContextClassLoader().getResource("yarn-site.xml");
        if (url == null) {
            throw new RuntimeException("Could not find 'yarn-site.xml' dummy file in classpath");
        }
        Configuration yarnClusterConfig = yarnCluster.getConfig();
        yarnClusterConfig.set("yarn.application.classpath", new File(url.getPath()).getParent());
        //write the document to a buffer (not directly to the file, as that
        //can cause the file being written to get read, which will then fail)
        ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
        yarnClusterConfig.writeXml(bytesOut);
        bytesOut.close();
        //write the bytes to the file in the classpath
        OutputStream os = new FileOutputStream(new File(url.getPath()));
        os.write(bytesOut.toByteArray());
        os.close();
        FileContext fsContext = FileContext.getLocalFSFileContext();
        fsContext.delete(new Path(conf.get("yarn.timeline-service.leveldb-timeline-store.path")), true);
        try {
            Thread.sleep(2000);
        } catch (InterruptedException e) {
        }
    } catch (Exception e) {
        throw new UnableToStartException("Exception setting up yarn cluster", e);
    }
}

From source file: org.apache.nifi.processors.kite.TestConfigurationProperty.java

License: Apache License

@Before
public void saveConfiguration() throws IOException {
    Configuration conf = new Configuration(false);
    conf.setBoolean("nifi.config.canary", true);

    confLocation = temp.newFile("nifi-conf.xml");
    FileOutputStream out = new FileOutputStream(confLocation);
    conf.writeXml(out);
    out.close();
}

From source file: org.apache.oozie.action.hadoop.HiveMain.java

License: Apache License

public static Configuration setUpHiveSite() throws Exception {
    Configuration hiveConf = initActionConf();

    // Write the action configuration out to hive-site.xml
    OutputStream os = new FileOutputStream(HIVE_SITE_CONF);
    hiveConf.writeXml(os);
    os.close();

    System.out.println();
    System.out.println("Hive Configuration Properties:");
    System.out.println("------------------------");
    for (Entry<String, String> entry : hiveConf) {
        System.out.println(entry.getKey() + "=" + entry.getValue());
    }
    System.out.flush();
    System.out.println("------------------------");
    System.out.println();

    // Reset the hiveSiteURL static variable as we just created hive-site.xml.
    // If prepare block had a drop partition it would have been initialized to null.
    Field declaredField = HiveConf.class.getDeclaredField("hiveSiteURL");
    if (declaredField != null) {
        declaredField.setAccessible(true);
        declaredField.set(null, HiveConf.class.getClassLoader().getResource("hive-site.xml"));
    }
    return hiveConf;
}

From source file: org.apache.oozie.action.hadoop.LauncherMapper.java

License: Apache License

/**
 * Pushing all important conf to hadoop conf for the action
 */
private void propagateToHadoopConf() throws IOException {
    Configuration propagationConf = new Configuration(false);
    if (System.getProperty(OOZIE_ACTION_ID) != null) {
        propagationConf.set(OOZIE_ACTION_ID, System.getProperty(OOZIE_ACTION_ID));
    }
    if (System.getProperty(OOZIE_JOB_ID) != null) {
        propagationConf.set(OOZIE_JOB_ID, System.getProperty(OOZIE_JOB_ID));
    }
    if (System.getProperty(OOZIE_LAUNCHER_JOB_ID) != null) {
        propagationConf.set(OOZIE_LAUNCHER_JOB_ID, System.getProperty(OOZIE_LAUNCHER_JOB_ID));
    }

    // loading action conf prepared by Oozie
    Configuration actionConf = LauncherMain.loadActionConf();

    if (actionConf.get(LauncherMainHadoopUtils.CHILD_MAPREDUCE_JOB_TAGS) != null) {
        propagationConf.set(LauncherMain.MAPREDUCE_JOB_TAGS,
                actionConf.get(LauncherMainHadoopUtils.CHILD_MAPREDUCE_JOB_TAGS));
    }

    propagationConf.writeXml(new FileWriter(PROPAGATION_CONF_XML));
    Configuration.dumpConfiguration(propagationConf, new OutputStreamWriter(System.out));
    Configuration.addDefaultResource(PROPAGATION_CONF_XML);
}

From source file: org.apache.oozie.action.hadoop.LauncherMapperHelper.java

License: Apache License

public static void setupLauncherInfo(JobConf launcherConf, String jobId, String actionId, Path actionDir,
        String recoveryId, Configuration actionConf, String prepareXML)
        throws IOException, HadoopAccessorException {

    launcherConf.setMapperClass(LauncherMapper.class);
    launcherConf.setSpeculativeExecution(false);
    launcherConf.setNumMapTasks(1);
    launcherConf.setNumReduceTasks(0);

    launcherConf.set(LauncherMapper.OOZIE_JOB_ID, jobId);
    launcherConf.set(LauncherMapper.OOZIE_ACTION_ID, actionId);
    launcherConf.set(LauncherMapper.OOZIE_ACTION_DIR_PATH, actionDir.toString());
    launcherConf.set(LauncherMapper.OOZIE_ACTION_RECOVERY_ID, recoveryId);
    launcherConf.set(LauncherMapper.ACTION_PREPARE_XML, prepareXML);

    actionConf.set(LauncherMapper.OOZIE_JOB_ID, jobId);
    actionConf.set(LauncherMapper.OOZIE_ACTION_ID, actionId);

    if (Services.get().getConf().getBoolean("oozie.hadoop-2.0.2-alpha.workaround.for.distributed.cache",
            false)) {
        List<String> purgedEntries = new ArrayList<String>();
        Collection<String> entries = actionConf.getStringCollection("mapreduce.job.cache.files");
        for (String entry : entries) {
            if (entry.contains("#")) {
                purgedEntries.add(entry);
            }
        }
        actionConf.setStrings("mapreduce.job.cache.files",
                purgedEntries.toArray(new String[purgedEntries.size()]));
        launcherConf.setBoolean("oozie.hadoop-2.0.2-alpha.workaround.for.distributed.cache", true);
    }

    FileSystem fs = Services.get().get(HadoopAccessorService.class)
            .createFileSystem(launcherConf.get("user.name"), actionDir.toUri(), launcherConf);
    fs.mkdirs(actionDir);

    OutputStream os = fs.create(new Path(actionDir, LauncherMapper.ACTION_CONF_XML));
    try {
        actionConf.writeXml(os);
    } finally {
        IOUtils.closeSafely(os);
    }

    launcherConf.setInputFormat(OozieLauncherInputFormat.class);
    launcherConf.set("mapred.output.dir", new Path(actionDir, "output").toString());
}

From source file: org.apache.oozie.action.hadoop.SqoopMain.java

License: Apache License

public static Configuration setUpSqoopSite() throws Exception {
    Configuration sqoopConf = initActionConf();

    // Write the action configuration out to sqoop-site.xml
    OutputStream os = new FileOutputStream(SQOOP_SITE_CONF);
    try {
        sqoopConf.writeXml(os);
    } finally {
        os.close();
    }

    System.out.println();
    System.out.println("Sqoop Configuration Properties:");
    System.out.println("------------------------");
    for (Map.Entry<String, String> entry : sqoopConf) {
        System.out.println(entry.getKey() + "=" + entry.getValue());
    }
    System.out.flush();
    System.out.println("------------------------");
    System.out.println();
    return sqoopConf;
}

From source file: org.apache.oozie.action.hadoop.TestFsELFunctions.java

License: Apache License

public void testFunctions() throws Exception {
    String file1 = new Path(getFsTestCaseDir(), "file1").toString();
    String file2 = new Path(getFsTestCaseDir(), "file2").toString();
    String dir = new Path(getFsTestCaseDir(), "dir").toString();
    Configuration protoConf = new Configuration();
    protoConf.set(OozieClient.USER_NAME, getTestUser());
    protoConf.set("hadoop.job.ugi", getTestUser() + "," + "group");

    FileSystem fs = getFileSystem();
    fs.mkdirs(new Path(dir));
    fs.create(new Path(file1)).close();
    OutputStream os = fs.create(new Path(dir, "a"));
    byte[] arr = new byte[1];
    os.write(arr);
    os.close();
    os = fs.create(new Path(dir, "b"));
    arr = new byte[2];
    os.write(arr);
    os.close();

    Configuration conf = new XConfiguration();
    conf.set(OozieClient.APP_PATH, "appPath");
    conf.set(OozieClient.USER_NAME, getTestUser());

    conf.set("test.dir", getTestCaseDir());
    conf.set("file1", file1);
    conf.set("file2", file2);
    conf.set("file3", "${file2}");
    conf.set("file4", getFsTestCaseDir() + "/file{1,2}");
    conf.set("file5", getFsTestCaseDir() + "/file*");
    conf.set("file6", getFsTestCaseDir() + "/file_*");
    conf.set("dir", dir);

    LiteWorkflowApp def = new LiteWorkflowApp("name", "<workflow-app/>",
            new StartNodeDef(LiteWorkflowStoreService.LiteControlNodeHandler.class, "end"))
                    .addNode(new EndNodeDef("end", LiteWorkflowStoreService.LiteControlNodeHandler.class));
    LiteWorkflowInstance job = new LiteWorkflowInstance(def, conf, "wfId");

    WorkflowJobBean wf = new WorkflowJobBean();
    wf.setId(job.getId());
    wf.setAppName("name");
    wf.setAppPath("appPath");
    wf.setUser(getTestUser());
    wf.setGroup("group");
    wf.setWorkflowInstance(job);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    protoConf.writeXml(baos);
    wf.setProtoActionConf(baos.toString());

    WorkflowActionBean action = new WorkflowActionBean();
    action.setId("actionId");
    action.setName("actionName");

    ELEvaluator eval = Services.get().get(ELService.class).createEvaluator("workflow");
    DagELFunctions.configureEvaluator(eval, wf, action);

    assertEquals(true, (boolean) eval.evaluate("${fs:exists(wf:conf('file1'))}", Boolean.class));
    assertEquals(false, (boolean) eval.evaluate("${fs:exists(wf:conf('file2'))}", Boolean.class));
    assertEquals(true, (boolean) eval.evaluate("${fs:exists(wf:conf('file4'))}", Boolean.class));
    assertEquals(true, (boolean) eval.evaluate("${fs:exists(wf:conf('file5'))}", Boolean.class));
    assertEquals(false, (boolean) eval.evaluate("${fs:exists(wf:conf('file6'))}", Boolean.class));
    assertEquals(true, (boolean) eval.evaluate("${fs:exists(wf:conf('dir'))}", Boolean.class));
    assertEquals(false, (boolean) eval.evaluate("${fs:isDir(wf:conf('file1'))}", Boolean.class));
    assertEquals(0, (int) eval.evaluate("${fs:fileSize(wf:conf('file1'))}", Integer.class));
    assertEquals(-1, (int) eval.evaluate("${fs:fileSize(wf:conf('file2'))}", Integer.class));
    assertEquals(3, (int) eval.evaluate("${fs:dirSize(wf:conf('dir'))}", Integer.class));
    assertEquals(-1, (int) eval.evaluate("${fs:blockSize(wf:conf('file2'))}", Integer.class));
    assertTrue(eval.evaluate("${fs:blockSize(wf:conf('file1'))}", Integer.class) > 0);
}

From source file: org.apache.oozie.action.hadoop.TestJavaActionExecutor.java

License: Apache License

@SuppressWarnings("unchecked")
public void testSetupMethods() throws Exception {
    JavaActionExecutor ae = new JavaActionExecutor();
    assertEquals(Arrays.asList(JavaMain.class), ae.getLauncherClasses());
    Configuration conf = new XConfiguration();
    conf.set("user.name", "a");
    try {
        JavaActionExecutor.checkForDisallowedProps(conf, "x");
        fail();
    } catch (ActionExecutorException ex) {
    }

    conf = new XConfiguration();
    conf.set("mapred.job.tracker", "a");
    try {
        JavaActionExecutor.checkForDisallowedProps(conf, "x");
        fail();
    } catch (ActionExecutorException ex) {
    }

    conf = new XConfiguration();
    conf.set("fs.default.name", "a");
    try {
        JavaActionExecutor.checkForDisallowedProps(conf, "x");
        fail();
    } catch (ActionExecutorException ex) {
    }

    conf = new XConfiguration();
    conf.set("a", "a");
    try {
        JavaActionExecutor.checkForDisallowedProps(conf, "x");
    } catch (ActionExecutorException ex) {
        fail();
    }

    Element actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
            + "<name-node>" + getNameNodeUri() + "</name-node>" + "<job-xml>job.xml</job-xml>"
            + "<job-xml>job2.xml</job-xml>" + "<configuration>"
            + "<property><name>oozie.launcher.a</name><value>LA</value></property>"
            + "<property><name>a</name><value>AA</value></property>"
            + "<property><name>b</name><value>BB</value></property>" + "</configuration>"
            + "<main-class>MAIN-CLASS</main-class>" + "<java-opts>JAVA-OPTS</java-opts>" + "<arg>A1</arg>"
            + "<arg>A2</arg>" + "<file>f.jar</file>" + "<archive>a.tar</archive>" + "</java>");

    Path appPath = new Path(getFsTestCaseDir(), "wf");

    Path appJarPath = new Path("lib/a.jar");
    getFileSystem().create(new Path(appPath, appJarPath)).close();

    Path appSoPath = new Path("lib/a.so");
    getFileSystem().create(new Path(appPath, appSoPath)).close();

    Path appSo1Path = new Path("lib/a.so.1");
    String expectedSo1Path = "lib/a.so.1#a.so.1";
    getFileSystem().create(new Path(appPath, appSo1Path)).close();

    Path filePath = new Path("f.jar");
    getFileSystem().create(new Path(appPath, filePath)).close();

    Path archivePath = new Path("a.tar");
    getFileSystem().create(new Path(appPath, archivePath)).close();

    XConfiguration protoConf = new XConfiguration();
    protoConf.set(WorkflowAppService.HADOOP_USER, getTestUser());
    protoConf.setStrings(WorkflowAppService.APP_LIB_PATH_LIST, appJarPath.toString(), appSoPath.toString());

    WorkflowJobBean wf = createBaseWorkflow(protoConf, "action");
    WorkflowActionBean action = (WorkflowActionBean) wf.getActions().get(0);
    action.setType(ae.getType());

    Context context = new Context(wf, action);

    conf = new XConfiguration();
    conf.set("c", "C");
    conf.set("oozie.launcher.d", "D");
    OutputStream os = getFileSystem().create(new Path(getFsTestCaseDir(), "job.xml"));
    conf.writeXml(os);
    os.close();

    conf = new XConfiguration();
    conf.set("e", "E");
    conf.set("oozie.launcher.f", "F");
    os = getFileSystem().create(new Path(getFsTestCaseDir(), "job2.xml"));
    conf.writeXml(os);
    os.close();

    conf = ae.createBaseHadoopConf(context, actionXml);
    assertEquals(protoConf.get(WorkflowAppService.HADOOP_USER), conf.get(WorkflowAppService.HADOOP_USER));
    assertEquals(getJobTrackerUri(), conf.get("mapred.job.tracker"));
    assertEquals(getNameNodeUri(), conf.get("fs.default.name"));

    conf = ae.createBaseHadoopConf(context, actionXml);
    ae.setupLauncherConf(conf, actionXml, getFsTestCaseDir(), context);
    assertEquals("LA", conf.get("oozie.launcher.a"));
    assertEquals("LA", conf.get("a"));
    assertNull(conf.get("b"));
    assertNull(conf.get("oozie.launcher.d"));
    assertNull(conf.get("d"));
    assertNull(conf.get("action.foo"));
    assertEquals("action.barbar", conf.get("action.foofoo"));

    conf = ae.createBaseHadoopConf(context, actionXml);
    ae.setupActionConf(conf, context, actionXml, getFsTestCaseDir());
    assertEquals("LA", conf.get("oozie.launcher.a"));
    assertEquals("AA", conf.get("a"));
    assertEquals("BB", conf.get("b"));
    assertEquals("C", conf.get("c"));
    assertEquals("D", conf.get("oozie.launcher.d"));
    assertEquals("E", conf.get("e"));
    assertEquals("F", conf.get("oozie.launcher.f"));
    assertEquals("action.bar", conf.get("action.foo"));

    conf = ae.createBaseHadoopConf(context, actionXml);
    ae.setupLauncherConf(conf, actionXml, getFsTestCaseDir(), context);
    ae.addToCache(conf, appPath, appJarPath.toString(), false);
    assertTrue(conf.get("mapred.job.classpath.files").contains(appJarPath.toUri().getPath()));
    ae.addToCache(conf, appPath, appSoPath.toString(), false);
    assertTrue(conf.get("mapred.cache.files").contains(appSoPath.toUri().getPath()));
    ae.addToCache(conf, appPath, appSo1Path.toString(), false);
    assertTrue(conf.get("mapred.cache.files").contains(expectedSo1Path));
    assertFalse(getFileSystem().exists(context.getActionDir()));
    ae.prepareActionDir(getFileSystem(), context);
    assertTrue(getFileSystem().exists(context.getActionDir()));

    ae.cleanUpActionDir(getFileSystem(), context);
    assertFalse(getFileSystem().exists(context.getActionDir()));

    conf = ae.createBaseHadoopConf(context, actionXml);
    ae.setupLauncherConf(conf, actionXml, getFsTestCaseDir(), context);
    ae.setLibFilesArchives(context, actionXml, appPath, conf);

    assertTrue(conf.get("mapred.cache.files").contains(filePath.toUri().getPath()));
    assertTrue(conf.get("mapred.cache.archives").contains(archivePath.toUri().getPath()));

    conf = ae.createBaseHadoopConf(context, actionXml);
    ae.setupActionConf(conf, context, actionXml, getFsTestCaseDir());
    ae.setLibFilesArchives(context, actionXml, appPath, conf);

    assertTrue(conf.get("mapred.cache.files").contains(filePath.toUri().getPath()));
    assertTrue(conf.get("mapred.cache.archives").contains(archivePath.toUri().getPath()));

    Configuration actionConf = ae.createBaseHadoopConf(context, actionXml);
    ae.setupActionConf(actionConf, context, actionXml, getFsTestCaseDir());

    conf = ae.createLauncherConf(getFileSystem(), context, action, actionXml, actionConf);
    ae.setupLauncherConf(conf, actionXml, getFsTestCaseDir(), context);
    assertEquals("MAIN-CLASS", actionConf.get("oozie.action.java.main", "null"));
    assertEquals("org.apache.oozie.action.hadoop.JavaMain", ae.getLauncherMain(conf, actionXml));
    assertTrue(conf.get("mapred.child.java.opts").contains("JAVA-OPTS"));
    assertTrue(conf.get("mapreduce.map.java.opts").contains("JAVA-OPTS"));
    assertEquals(Arrays.asList("A1", "A2"), Arrays.asList(LauncherMapper.getMainArguments(conf)));

    assertTrue(getFileSystem().exists(new Path(context.getActionDir(), LauncherMapper.ACTION_CONF_XML)));

    actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
            + "<name-node>" + getNameNodeUri() + "</name-node> <configuration>"
            + "<property><name>mapred.job.queue.name</name><value>AQ</value></property>"
            + "<property><name>oozie.action.sharelib.for.java</name><value>sharelib-java</value></property>"
            + "</configuration>" + "<main-class>MAIN-CLASS</main-class>" + "</java>");
    actionConf = ae.createBaseHadoopConf(context, actionXml);
    ae.setupActionConf(actionConf, context, actionXml, appPath);
    conf = ae.createLauncherConf(getFileSystem(), context, action, actionXml, actionConf);
    assertEquals("AQ", conf.get("mapred.job.queue.name"));
    assertEquals("AQ", actionConf.get("mapred.job.queue.name"));
    assertEquals("sharelib-java", actionConf.get("oozie.action.sharelib.for.java"));

    actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
            + "<name-node>" + getNameNodeUri() + "</name-node> <configuration>"
            + "<property><name>oozie.launcher.mapred.job.queue.name</name><value>LQ</value></property>"
            + "</configuration>" + "<main-class>MAIN-CLASS</main-class>" + "</java>");
    actionConf = ae.createBaseHadoopConf(context, actionXml);
    ae.setupActionConf(actionConf, context, actionXml, appPath);
    conf = ae.createLauncherConf(getFileSystem(), context, action, actionXml, actionConf);
    assertEquals("LQ", conf.get("mapred.job.queue.name"));

    actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
            + "<name-node>" + getNameNodeUri() + "</name-node> <configuration>"
            + "<property><name>oozie.launcher.mapred.job.queue.name</name><value>LQ</value></property>"
            + "<property><name>mapred.job.queue.name</name><value>AQ</value></property>" + "</configuration>"
            + "<main-class>MAIN-CLASS</main-class>" + "</java>");
    actionConf = ae.createBaseHadoopConf(context, actionXml);
    ae.setupActionConf(actionConf, context, actionXml, appPath);
    conf = ae.createLauncherConf(getFileSystem(), context, action, actionXml, actionConf);
    assertEquals("LQ", conf.get("mapred.job.queue.name"));
    assertEquals("AQ", actionConf.get("mapred.job.queue.name"));
    assertEquals(true, conf.getBoolean("mapreduce.job.complete.cancel.delegation.tokens", false));
    assertEquals(false, actionConf.getBoolean("mapreduce.job.complete.cancel.delegation.tokens", true));
}

From source file: org.apache.oozie.action.hadoop.TestLauncherMain.java

License: Apache License

@Test
public void testConfigWrite() throws IOException {
    File f = new File(tmp.newFolder(), "nonExistentFile");
    assertFalse(f.exists());
    try (FileOutputStream fos = LauncherMain.createStreamIfFileNotExists(f)) {
        Configuration c = new Configuration(false);
        c.set("foo", "bar");
        c.writeXml(fos);
    }
    String contents = new String(Files.readAllBytes(f.toPath()), StandardCharsets.UTF_8);
    assertTrue(contents.contains("foo"));
    assertTrue(contents.contains("bar"));
    assertTrue(contents.contains("<configuration>"));
    assertTrue(contents.contains("<property"));
}