List of usage examples for java.lang.System.setOut
public static void setOut(PrintStream out)
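The examples below share a common pattern: save a reference to the original stream, swap in a PrintStream backed by a buffer, run the code under test, then restore the original stream. A minimal, self-contained sketch of that pattern (class and variable names are illustrative, not taken from any of the projects listed):

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

public class CaptureStdOutExample {
    public static void main(String[] args) {
        // keep a reference to the original stream so it can be restored
        PrintStream originalOut = System.out;
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();

        System.setOut(new PrintStream(buffer));
        try {
            System.out.println("hello, captured world");
        } finally {
            // always restore the original stream, even if the code under test throws
            System.out.flush();
            System.setOut(originalOut);
        }

        originalOut.println("captured: " + buffer.toString());
    }
}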
From source file:org.g_node.crawler.LKTLogbook.LKTLogCliToolControllerTest.java
/**
 * Delete temporary test files and folders, close Logger and reset std out.
 * @throws Exception
 */
@After
public void tearDown() throws Exception {
    if (Files.exists(this.testFileFolder)) {
        FileUtils.deleteDirectory(this.testFileFolder.toFile());
    }
    this.rootLogger.removeAllAppenders();
    System.setOut(this.stdout);
}
From source file:org.arquillian.spacelift.process.impl.ProcessNameTest.java
@Test
public void outputNoPrefix() throws UnsupportedEncodingException {
    // run only on linux
    Assume.assumeThat(SystemUtils.IS_OS_LINUX, is(true));

    final ByteArrayOutputStream errorOutput = new ByteArrayOutputStream();
    System.setOut(new PrintStream(errorOutput));

    exception = ExpectedException.none();
    try {
        exception.expectMessage(containsString("java -bar -baz"));
        Tasks.prepare(CommandTool.class).programName("java").parameters("-bar", "-baz")
                .interaction(new ProcessInteractionBuilder().outputPrefix("").when(".*").printToOut())
                .execute()
                .await();
    } catch (ExecutionException e) {
        // ignore
    }

    String output = errorOutput.toString(Charset.defaultCharset().name());
    Assert.assertThat(output, startsWith("Unrecognized option: -bar"));
}
From source file:org.nuxeo.automation.scripting.test.TestCompileAndContext.java
@After
public void cleanUpStreams() throws IOException {
    outContent.close();
    System.setOut(outStream);
}
From source file:org.apache.hadoop.hbase.backup.TestBackupShowHistory.java
/**
 * Verify that full backup is created on a single table with data correctly. Verify that history
 * works as expected.
 * @throws Exception
 */
@Test
public void testBackupHistory() throws Exception {

    LOG.info("test backup history on a single table with data");

    List<TableName> tableList = Lists.newArrayList(table1);
    String backupId = fullTableBackup(tableList);
    assertTrue(checkSucceeded(backupId));
    LOG.info("backup complete");

    List<BackupInfo> history = getBackupAdmin().getHistory(10);
    assertTrue(findBackup(history, backupId));
    BackupInfo.Filter nullFilter = new BackupInfo.Filter() {
        @Override
        public boolean apply(BackupInfo info) {
            return true;
        }
    };
    history = BackupUtils.getHistory(conf1, 10, new Path(BACKUP_ROOT_DIR), nullFilter);
    assertTrue(findBackup(history, backupId));

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    System.setOut(new PrintStream(baos));

    String[] args = new String[] { "history", "-n", "10", "-p", BACKUP_ROOT_DIR };
    // Run backup
    int ret = ToolRunner.run(conf1, new BackupDriver(), args);
    assertTrue(ret == 0);
    LOG.info("show_history");
    String output = baos.toString();
    LOG.info(output);
    baos.close();
    assertTrue(output.indexOf(backupId) > 0);

    tableList = Lists.newArrayList(table2);
    String backupId2 = fullTableBackup(tableList);
    assertTrue(checkSucceeded(backupId2));
    LOG.info("backup complete: " + table2);

    BackupInfo.Filter tableNameFilter = new BackupInfo.Filter() {
        @Override
        public boolean apply(BackupInfo image) {
            if (table1 == null) {
                return true;
            }
            List<TableName> names = image.getTableNames();
            return names.contains(table1);
        }
    };
    BackupInfo.Filter tableSetFilter = new BackupInfo.Filter() {
        @Override
        public boolean apply(BackupInfo info) {
            String backupId = info.getBackupId();
            return backupId.startsWith("backup");
        }
    };

    history = getBackupAdmin().getHistory(10, tableNameFilter, tableSetFilter);
    assertTrue(history.size() > 0);
    boolean success = true;
    for (BackupInfo info : history) {
        if (!info.getTableNames().contains(table1)) {
            success = false;
            break;
        }
    }
    assertTrue(success);

    history = BackupUtils.getHistory(conf1, 10, new Path(BACKUP_ROOT_DIR), tableNameFilter, tableSetFilter);
    assertTrue(history.size() > 0);
    success = true;
    for (BackupInfo info : history) {
        if (!info.getTableNames().contains(table1)) {
            success = false;
            break;
        }
    }
    assertTrue(success);

    args = new String[] { "history", "-n", "10", "-p", BACKUP_ROOT_DIR, "-t", "table1", "-s", "backup" };
    // Run backup
    ret = ToolRunner.run(conf1, new BackupDriver(), args);
    assertTrue(ret == 0);
    LOG.info("show_history");
}
From source file:org.apache.hadoop.hive.ql.hooks.PostExecOrcFileDump.java
@Override
public void run(HookContext hookContext) throws Exception {
    assert (hookContext.getHookType() == HookContext.HookType.POST_EXEC_HOOK);
    HiveConf conf = hookContext.getConf();

    LOG.info("Executing post execution hook to print orc file dump..");
    QueryPlan plan = hookContext.getQueryPlan();
    if (plan == null) {
        return;
    }

    FetchTask fetchTask = plan.getFetchTask();
    if (fetchTask != null) {
        SessionState ss = SessionState.get();
        SessionState.LogHelper console = ss.getConsole();

        // file dump should write to session state console's error stream
        PrintStream old = System.out;
        System.setOut(console.getErrStream());

        FetchWork fetchWork = fetchTask.getWork();
        boolean partitionedTable = fetchWork.isPartitioned();
        List<Path> directories;
        if (partitionedTable) {
            LOG.info("Printing orc file dump for files from partitioned directory..");
            directories = fetchWork.getPartDir();
        } else {
            LOG.info("Printing orc file dump for files from table directory..");
            directories = Lists.newArrayList();
            directories.add(fetchWork.getTblDir());
        }

        for (Path dir : directories) {
            FileSystem fs = dir.getFileSystem(conf);
            List<FileStatus> fileList = ShimLoader.getHadoopShims().listLocatedStatus(fs, dir, hiddenFileFilter);

            for (FileStatus fileStatus : fileList) {
                LOG.info("Printing orc file dump for " + fileStatus.getPath());
                if (fileStatus.getLen() > 0) {
                    try {
                        // just creating orc reader is going to do sanity checks to make sure its valid ORC file
                        OrcFile.createReader(fs, fileStatus.getPath());
                        console.printError("-- BEGIN ORC FILE DUMP --");
                        FileDump.main(new String[] { fileStatus.getPath().toString(), "--rowindex=1" });
                        console.printError("-- END ORC FILE DUMP --");
                    } catch (FileFormatException e) {
                        LOG.warn("File " + fileStatus.getPath() + " is not ORC. Skip printing orc file dump");
                    } catch (IOException e) {
                        LOG.warn("Skip printing orc file dump. Exception: " + e.getMessage());
                    }
                } else {
                    LOG.warn("Zero length file encountered. Skip printing orc file dump.");
                }
            }
        }

        // restore the old out stream
        System.out.flush();
        System.setOut(old);
    }
}
From source file:ed.db.mql.MQLShell.java
MQLShell(PrintStream out, String[] args) throws Exception {
    Options opts = new Options();

    opts.addOption("h", "help", false, "show command line usage");
    opts.addOption("noexit", false, "remain at command prompt after running a script");
    opts.addOption("db", true, "db to connect to");
    opts.addOption("dump", false, "dump database");

    CommandLine cl = (new PosixParser()).parse(opts, args);

    if (cl.hasOption("help")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("mql [options] [script]", opts);
        System.exit(0);
    }

    setDump(cl.hasOption("dump"));

    _out = new MyPrintStream(out);

    // if dumping, try to intercept the spew from the libraries
    if (_dump) {
        _out._dropJunk = true;
        _out._comment = "// ";
        System.setOut(_out);
    }

    _scope = Scope.newGlobal().child(new File("."));
    _scope.setGlobal(true);
    _scope.makeThreadLocal();

    if (cl.hasOption("db")) {
        setDB(cl.getOptionValue("db"));
    }

    set_exit(!cl.hasOption("noexit"));

    // now, if we were given a list of files to deal with...
    _mqlArgs = cl.getArgs();
}
From source file:gov.nih.nci.ncicb.tcga.dcc.QCLiveTestDataGeneratorSlowTest.java
/**
 * Tests the {@link QCLiveTestDataGenerator#main(String[])} method by passing it an empty command line argument list.
 */
@Test
public void testNoArgs() {
    // Call the QCLiveTestDataGenerator with no arguments and capture the output
    QCLiveTestDataGenerator.main(new String[] {});
    String actualOutput = outContent.toString().trim();

    // Print the expected output to system out and capture its output
    System.setOut(new PrintStream(outContent = new ByteArrayOutputStream()));
    QCLiveTestDataGenerator.displayHelp();
    String expectedOutput = outContent.toString().trim();

    // Compare the results
    assertEquals(expectedOutput, actualOutput);
}
From source file:org.apache.hadoop.hive.ql.session.TestClearDanglingScratchDir.java
public void rollbackStdOutErr() {
    System.setOut(origStdoutPs);
    System.setErr(origStderrPs);
}
From source file:org.apache.rocketmq.tools.command.message.SendMessageCommandTest.java
@Test
public void testExecute() throws SubCommandException {
    PrintStream out = System.out;
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    System.setOut(new PrintStream(bos));

    Options options = ServerUtil.buildCommandlineOptions(new Options());

    String[] subargs = new String[] { "-t mytopic", "-p 'send message test'", "-c tagA", "-k order-16546745756" };
    CommandLine commandLine = ServerUtil.parseCmdLine("mqadmin " + sendMessageCommand.commandName(), subargs,
            sendMessageCommand.buildCommandlineOptions(options), new PosixParser());
    sendMessageCommand.execute(commandLine, options, null);

    subargs = new String[] { "-t mytopic", "-p 'send message test'", "-c tagA", "-k order-16546745756",
            "-b brokera", "-i 1" };
    commandLine = ServerUtil.parseCmdLine("mqadmin " + sendMessageCommand.commandName(), subargs,
            sendMessageCommand.buildCommandlineOptions(options), new PosixParser());
    sendMessageCommand.execute(commandLine, options, null);

    System.setOut(out);
    String s = new String(bos.toByteArray());
    Assert.assertTrue(s.contains("SEND_OK"));
}
From source file:com.complexible.stardog.ext.spring.batch.TestSpringBatch.java
@After
public void cleanUpStreams() {
    // note: this sets the streams to null rather than restoring the original System.out/System.err
    System.setOut(null);
    System.setErr(null);
}