List of usage examples for org.apache.hadoop.conf.Configuration.writeXml
public void writeXml(Writer out) throws IOException
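For orientation, here is a minimal, self-contained sketch (not taken from the indexed sources; the class name WriteXmlSketch and the property example.key are made up for illustration). It writes an in-memory Configuration to a StringWriter using the Writer overload shown above; an OutputStream overload also exists and is what several of the examples below use.

import java.io.IOException;
import java.io.StringWriter;

import org.apache.hadoop.conf.Configuration;

public class WriteXmlSketch {
    public static void main(String[] args) throws IOException {
        // Start from an empty configuration (skip loading core-default.xml / core-site.xml).
        Configuration conf = new Configuration(false);
        conf.set("example.key", "example-value"); // hypothetical property, for illustration only

        // Serialize the current properties as a <configuration> XML document.
        try (StringWriter writer = new StringWriter()) {
            conf.writeXml(writer);
            System.out.println(writer);
        }
    }
}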
From source file: co.cask.cdap.explore.service.ExploreServiceUtilsTest.java
License: Apache License

@Test
public void hijackConfFileTest() throws Exception {
    Configuration conf = new Configuration(false);
    conf.set("foo", "bar");
    Assert.assertEquals(1, conf.size());

    File tempDir = tmpFolder.newFolder();
    File confFile = tmpFolder.newFile("hive-site.xml");
    try (FileOutputStream os = new FileOutputStream(confFile)) {
        conf.writeXml(os);
    }

    File newConfFile = ExploreServiceUtils.updateConfFileForExplore(confFile, tempDir);
    conf = new Configuration(false);
    conf.addResource(newConfFile.toURI().toURL());
    Assert.assertEquals(3, conf.size());
    Assert.assertEquals("false", conf.get(Job.MAPREDUCE_JOB_USER_CLASSPATH_FIRST));
    Assert.assertEquals("false", conf.get(Job.MAPREDUCE_JOB_CLASSLOADER));
    Assert.assertEquals("bar", conf.get("foo"));

    // check yarn-site changes
    confFile = tmpFolder.newFile("yarn-site.xml");
    conf = new YarnConfiguration();
    try (FileOutputStream os = new FileOutputStream(confFile)) {
        conf.writeXml(os);
    }

    String yarnApplicationClassPath = "$PWD/*," + conf.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
        Joiner.on(",").join(YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH));

    newConfFile = ExploreServiceUtils.updateConfFileForExplore(confFile, tempDir);
    conf = new Configuration(false);
    conf.addResource(newConfFile.toURI().toURL());
    Assert.assertEquals(yarnApplicationClassPath, conf.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH));

    // check mapred-site changes
    confFile = tmpFolder.newFile("mapred-site.xml");
    conf = new YarnConfiguration();
    try (FileOutputStream os = new FileOutputStream(confFile)) {
        conf.writeXml(os);
    }

    String mapredApplicationClassPath = "$PWD/*," + conf.get(MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH,
        MRJobConfig.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH);

    newConfFile = ExploreServiceUtils.updateConfFileForExplore(confFile, tempDir);
    conf = new Configuration(false);
    conf.addResource(newConfFile.toURI().toURL());
    Assert.assertEquals(mapredApplicationClassPath, conf.get(MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH));

    // Ensure conf files that are not hive-site.xml/mapred-site.xml/yarn-site.xml are unchanged
    confFile = tmpFolder.newFile("core-site.xml");
    Assert.assertEquals(confFile, ExploreServiceUtils.updateConfFileForExplore(confFile, tempDir));
}
From source file: co.cask.cdap.hive.context.HConfCodec.java
License: Apache License

@Override
public byte[] encode(Configuration object) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    object.writeXml(bos);
    bos.close();
    return bos.toByteArray();
}
From source file: co.cask.cdap.internal.app.runtime.distributed.AbstractDistributedProgramRunner.java
License: Apache License

private File saveHConf(Configuration conf, File file) throws IOException {
    try (Writer writer = Files.newWriter(file, Charsets.UTF_8)) {
        conf.writeXml(writer);
    }
    return file;
}
From source file: co.cask.cdap.internal.app.runtime.spark.SparkRuntimeService.java
License: Apache License

/**
 * Serialize {@link Configuration} to a file.
 *
 * @return The {@link File} of the serialized configuration in the given target directory.
 */
private File saveHConf(Configuration hConf, File targetDir) throws IOException {
    File file = new File(targetDir, SparkContextProvider.HCONF_FILE_NAME);
    try (Writer writer = Files.newWriter(file, Charsets.UTF_8)) {
        hConf.writeXml(writer);
    }
    return file;
}
From source file: co.cask.tigon.internal.app.runtime.distributed.AbstractDistributedProgramRunner.java
License: Apache License

private File saveHConf(Configuration conf, File file) throws IOException {
    Writer writer = Files.newWriter(file, Charsets.UTF_8);
    try {
        conf.writeXml(writer);
    } finally {
        writer.close();
    }
    return file;
}
From source file: com.alibaba.wasp.fserver.FSDumpServlet.java
License: Apache License

@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
    FServer fs = (FServer) getServletContext().getAttribute(FServer.FSERVER);
    assert fs != null : "No FS in context!";
    Configuration hrsconf = (Configuration) getServletContext().getAttribute(FServer.FSERVER_CONF);
    assert hrsconf != null : "No FS conf in context";

    response.setContentType("text/plain");
    OutputStream os = response.getOutputStream();
    PrintWriter out = new PrintWriter(os);

    out.println("Master status for " + fs.getServerName() + " as of " + new Date());

    out.println("\n\nVersion Info:");
    out.println(LINE);
    dumpVersionInfo(out);

    out.println("\n\nTasks:");
    out.println(LINE);
    TaskMonitor.get().dumpAsText(out);

    out.println("\n\nExecutors:");
    out.println(LINE);
    dumpExecutors(fs.getExecutorService(), out);

    out.println("\n\nStacks:");
    out.println(LINE);
    ReflectionUtils.printThreadInfo(out, "");

    out.println("\n\nFS Configuration:");
    out.println(LINE);
    Configuration conf = fs.getConfiguration();
    out.flush();
    conf.writeXml(os);
    os.flush();

    out.println("\n\nLogs");
    out.println(LINE);
    long tailKb = getTailKbParam(request);
    LogMonitoring.dumpTailOfLogs(out, tailKb);

    out.println("\n\nFS Queue:");
    out.println(LINE);
    if (isShowQueueDump(hrsconf)) {
        dumpQueue(fs, out);
    }

    out.flush();
}
From source file: com.alibaba.wasp.master.FMasterDumpServlet.java
License: Apache License

@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
    FMaster master = (FMaster) getServletContext().getAttribute(FMaster.MASTER);
    assert master != null : "No Master in context!";

    response.setContentType("text/plain");
    OutputStream os = response.getOutputStream();
    PrintWriter out = new PrintWriter(os);

    out.println("Master status for " + master.getServerName() + " as of " + new Date());

    out.println("\n\nVersion Info:");
    out.println(LINE);
    dumpVersionInfo(out);

    out.println("\n\nTasks:");
    out.println(LINE);
    TaskMonitor.get().dumpAsText(out);

    out.println("\n\nServers:");
    out.println(LINE);
    dumpServers(master, out);

    out.println("\n\nEntityGroups-in-transition:");
    out.println(LINE);
    dumpRIT(master, out);

    out.println("\n\nExecutors:");
    out.println(LINE);
    dumpExecutors(master.getExecutorService(), out);

    out.println("\n\nStacks:");
    out.println(LINE);
    ReflectionUtils.printThreadInfo(out, "");

    out.println("\n\nMaster configuration:");
    out.println(LINE);
    Configuration conf = master.getConfiguration();
    out.flush();
    conf.writeXml(os);
    os.flush();

    // out.println("\n\nRecent fserver aborts:");
    // out.println(LINE);
    // master.getFServerFatalLogBuffer().dumpTo(out);

    out.println("\n\nLogs");
    out.println(LINE);
    long tailKb = getTailKbParam(request);
    LogMonitoring.dumpTailOfLogs(out, tailKb);

    out.flush();
}
From source file: com.asakusafw.dag.compiler.extension.directio.DirectFilePortDriverTest.java
License: Apache License

private void enableDirectIo() {
    Configuration configuration = helper.getContext().newConfiguration();
    profile.forFrameworkInstallation().add(LOCATION_CORE_CONFIGURATION, o -> configuration.writeXml(o));
}
From source file: com.asakusafw.m3bp.compiler.core.M3bpJobflowProcessorTest.java
License: Apache License

private void enableDirectIo() {
    Configuration configuration = directio.getContext().newConfiguration();
    profile.forFrameworkInstallation().add(LOCATION_CORE_CONFIGURATION, o -> configuration.writeXml(o));
}
From source file: com.asakusafw.runtime.util.hadoop.ConfigurationProviderTest.java
License: Apache License

private File putConf(String path) {
    Configuration c = new Configuration(false);
    c.set("testing.conf", "added");
    File file = create(path);
    try (OutputStream s = new FileOutputStream(file)) {
        c.writeXml(s);
    } catch (IOException e) {
        throw new AssertionError(e);
    }
    return file;
}