List of usage examples for java.lang.System.setOut
public static void setOut(PrintStream out)
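The examples below all follow the same basic pattern: swap in a replacement PrintStream, run the code whose output should be redirected, then restore the original stream. A minimal, self-contained sketch of that capture-and-restore idiom (illustrative only, not taken from any of the projects listed here):

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

public class SetOutExample {
    public static void main(String[] args) {
        // Keep a reference to the original stream so it can be restored.
        PrintStream original = System.out;
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try {
            // Redirect System.out into an in-memory buffer.
            System.setOut(new PrintStream(buffer));
            System.out.println("captured line");
        } finally {
            // Always restore the original stream, even if the redirected code throws.
            System.out.flush();
            System.setOut(original);
        }
        original.println("Captured output: " + buffer.toString());
    }
}

Restoring in a finally block is what keeps a failed test or an exception from leaving the whole JVM with a hijacked stdout.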
From source file:com.enioka.jqm.tools.MiscTest.java
@Test
public void testMultiLog() throws Exception {
    PrintStream out_ini = System.out;
    PrintStream err_ini = System.err;
    Helpers.setSingleParam("logFilePerLaunch", "true", em);

    CreationTools.createJobDef(null, true, "App", null, "jqm-tests/jqm-test-datetimemaven/target/test.jar",
            TestHelpers.qVip, 42, "MarsuApplication", null, "Franquin", "ModuleMachin", "other", "other", true, em);
    int i = JobRequest.create("MarsuApplication", "TestUser").submit();
    addAndStartEngine();
    TestHelpers.waitFor(1, 20000, em);

    String fileName = StringUtils.leftPad("" + i, 10, "0") + ".stdout.log";
    File f = new File(FilenameUtils.concat(((MultiplexPrintStream) System.out).rootLogDir, fileName));

    Assert.assertEquals(1, TestHelpers.getOkCount(em));
    Assert.assertEquals(0, TestHelpers.getNonOkCount(em));
    Assert.assertTrue(f.exists());

    System.setErr(err_ini);
    System.setOut(out_ini);
}
From source file:org.dita.dost.AbstractIntegrationTest.java
/**
 * Run test conversion.
 *
 * @param srcDir test source directory
 * @param transtype transtype to test
 * @return list of log messages
 * @throws Exception if conversion failed
 */
private List<TestListener.Message> runOt(final File srcDir, final Transtype transtype, final File tempBaseDir,
        final File resBaseDir, final Map<String, String> args, final String[] targets) throws Exception {
    final File tempDir = new File(tempBaseDir, transtype.toString());
    final File resDir = new File(resBaseDir, transtype.toString());
    deleteDirectory(resDir);
    deleteDirectory(tempDir);

    final TestListener listener = new TestListener(System.out, System.err);
    final PrintStream savedErr = System.err;
    final PrintStream savedOut = System.out;
    try {
        final File buildFile = new File(ditaDir, "build.xml");
        final Project project = new Project();
        project.addBuildListener(listener);
        System.setOut(new PrintStream(new DemuxOutputStream(project, false)));
        System.setErr(new PrintStream(new DemuxOutputStream(project, true)));
        project.fireBuildStarted();
        project.init();
        project.setUserProperty("transtype", transtype.name);
        if (transtype.equals("pdf") || transtype.equals("pdf2")) {
            project.setUserProperty("pdf.formatter", "fop");
            project.setUserProperty("fop.formatter.output-format", "text/plain");
        }
        project.setUserProperty("generate-debug-attributes", "false");
        project.setUserProperty("preprocess.copy-generated-files.skip", "true");
        project.setUserProperty("ant.file", buildFile.getAbsolutePath());
        project.setUserProperty("ant.file.type", "file");
        project.setUserProperty("dita.dir", ditaDir.getAbsolutePath());
        project.setUserProperty("output.dir", resDir.getAbsolutePath());
        project.setUserProperty("dita.temp.dir", tempDir.getAbsolutePath());
        project.setUserProperty("clean.temp", "no");
        args.entrySet().forEach(e -> project.setUserProperty(e.getKey(), e.getValue()));
        project.setKeepGoingMode(false);
        ProjectHelper.configureProject(project, buildFile);
        final Vector<String> ts = new Vector<>();
        if (targets != null) {
            ts.addAll(Arrays.asList(targets));
        } else {
            ts.addAll(Arrays.asList(transtype.targets));
        }
        project.executeTargets(ts);
        return listener.messages;
    } finally {
        System.setOut(savedOut);
        System.setErr(savedErr);
    }
}
From source file:net.openbyte.gui.WorkFrame.java
private void menuItem11ActionPerformed(ActionEvent e) {
    PrintStream previous = System.out;
    OutputFrame frame = new OutputFrame(this);
    System.setOut(new PrintStream(new StreamCapturer("OpenByte", frame, previous)));
    frame.setVisible(true);
}
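The snippet above routes System.out through a custom StreamCapturer (its implementation is not shown here) that appears to forward output both to an OutputFrame and to the previous stream. A minimal sketch of a comparable "tee" OutputStream using only the standard library; the class name and behavior are illustrative assumptions, not the OpenByte implementation:

import java.io.IOException;
import java.io.OutputStream;

// Illustrative tee stream: every byte written is copied to two underlying streams.
public class TeeOutputStream extends OutputStream {
    private final OutputStream first;
    private final OutputStream second;

    public TeeOutputStream(OutputStream first, OutputStream second) {
        this.first = first;
        this.second = second;
    }

    @Override
    public void write(int b) throws IOException {
        first.write(b);
        second.write(b);
    }

    @Override
    public void flush() throws IOException {
        first.flush();
        second.flush();
    }
}

Wrapping such a stream in a PrintStream and passing it to System.setOut lets console output reach both a UI component and the original stream, which is presumably what StreamCapturer does with its OutputFrame argument.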
From source file:org.anc.lapps.nlp4j.NLP4JCustomTrain.java
/**
 * Entry point for a Lappsgrid service.
 * <p>
 * Each service on the Lappsgrid will accept a {@code org.lappsgrid.serialization.Data} object
 * and return a {@code Data} object with a {@code org.lappsgrid.serialization.lif.Container} payload.
 * <p>
 * Errors and exceptions that occur during processing should be wrapped in a {@code Data}
 * object with the discriminator set to http://vocab.lappsgrid.org/ns/error
 * <p>
 * See <a href="https://lapp.github.io/org.lappsgrid.serialization/index.html?org/lappsgrid/serialization/Data.html">org.lappsgrid.serialization.Data</a><br />
 * See <a href="https://lapp.github.io/org.lappsgrid.serialization/index.html?org/lappsgrid/serialization/lif/Container.html">org.lappsgrid.serialization.lif.Container</a><br />
 *
 * @param input A JSON string representing a Data object
 * @return A JSON string containing a Data object with a Container payload.
 */
@Override
public String execute(String input) {
    logger = LoggerFactory.getLogger(NLP4JCustomTrain.class);

    // Parse the JSON string into a Data object, and extract its discriminator.
    Data<String> data = Serializer.parse(input, Data.class);
    String discriminator = data.getDiscriminator();

    // If the input discriminator is ERROR, return the Data as is, since it's already a wrapped error.
    if (Discriminators.Uri.ERROR.equals(discriminator)) {
        return input;
    }
    // If the input discriminator is not GET, return a wrapped error with an appropriate message.
    else if (!Discriminators.Uri.GET.equals(discriminator)) {
        String errorData = generateError(
                "Invalid discriminator.\nExpected " + Discriminators.Uri.GET + "\nFound " + discriminator);
        logger.error(errorData);
        return errorData;
    }

    // Output an error if no payload is given, since an input is required to run the program.
    if (data.getPayload() == null) {
        String errorData = generateError("No input given.");
        logger.error(errorData);
        return errorData;
    }
    // Else (if a payload is given), process the input.
    else {
        // Create temporary directories to hold input and output. This is needed because
        // the RankLib methods need directories for most of their processing, so the input
        // will be given within files in a directory, and the output will be read from files
        // in the output directory.
        Path outputDirPath = null;
        Path inputDirPath = null;
        try {
            outputDirPath = Files.createTempDirectory("output");
            outputDirPath.toFile().deleteOnExit();
            inputDirPath = Files.createTempDirectory("input");
            inputDirPath.toFile().deleteOnExit();
        }
        // Since we are only handling files created by the function, there should never be
        // a problem with these files. If there is, notify the user of the error.
        catch (IOException e) {
            String errorData = generateError("Error in creating temporary input/output directories.");
            logger.error(errorData);
            return errorData;
        }

        StringBuilder params = new StringBuilder("-c ");
        try {
            String configPath = makeConfigFile(inputDirPath, data);
            if (configPath.contains("ERROR")) {
                if (configPath.contains("INDEX ERROR")) {
                    StringBuilder errorMsg = new StringBuilder(
                            "The given list of TSV indices and TSV fields did not match.\r\n");
                    String[] errorParts;
                    errorParts = configPath.split(";");
                    errorMsg.append("Given indices: ").append(errorParts[1]);
                    errorMsg.append("\r\nGiven fields: ").append(errorParts[2]);
                    String errorData = generateError(errorMsg.toString());
                    logger.error(errorData);
                    return errorData;
                } else if (configPath.contains("AMBIGUITY ERROR")) {
                    StringBuilder errorMsg = new StringBuilder("Invalid field given for ambiguity classes.\r\n");
                    String[] errorParts;
                    errorParts = configPath.split(";");
                    errorMsg.append("Given: ").append(errorParts[1]);
                    String errorData = generateError(errorMsg.toString());
                    logger.error(errorData);
                    return errorData;
                } else if (configPath.contains("CLUSTERS ERROR")) {
                    StringBuilder errorMsg = new StringBuilder("Invalid field given for word clusters.\r\n");
                    String[] errorParts;
                    errorParts = configPath.split(";");
                    errorMsg.append("Given: ").append(errorParts[1]);
                    String errorData = generateError(errorMsg.toString());
                    logger.error(errorData);
                    return errorData;
                } else if (configPath.contains("NAMED ENTITY ERROR")) {
                    StringBuilder errorMsg = new StringBuilder(
                            "Invalid field given for named entity gazetteers.\r\n");
                    String[] errorParts;
                    errorParts = configPath.split(";");
                    errorMsg.append("Given: ").append(errorParts[1]);
                    String errorData = generateError(errorMsg.toString());
                    logger.error(errorData);
                    return errorData;
                } else if (configPath.contains("EMBEDDINGS ERROR")) {
                    StringBuilder errorMsg = new StringBuilder("Invalid field given for word embeddings.\r\n");
                    String[] errorParts;
                    errorParts = configPath.split(";");
                    errorMsg.append("Given: ").append(errorParts[1]);
                    String errorData = generateError(errorMsg.toString());
                    logger.error(errorData);
                    return errorData;
                } else if (configPath.contains("ALGORITHM ERROR")) {
                    StringBuilder errorMsg = new StringBuilder("Invalid name given for optimizer algorithm.\r\n");
                    String[] errorParts;
                    errorParts = configPath.split(";");
                    errorMsg.append("Given: ").append(errorParts[1]);
                    String errorData = generateError(errorMsg.toString());
                    logger.error(errorData);
                    return errorData;
                } else if (configPath.contains("INVALID FEATURE SOURCE ERROR")) {
                    StringBuilder errorMsg = new StringBuilder("Invalid source given for feature.\r\n");
                    String[] errorParts;
                    errorParts = configPath.split(";");
                    errorMsg.append("Given: ").append(errorParts[1]);
                    errorMsg.append("\r\nFeature line number: ").append(errorParts[2]);
                    errorMsg.append("\r\nFeature number: f").append(errorParts[3]);
                    String errorData = generateError(errorMsg.toString());
                    logger.error(errorData);
                    return errorData;
                } else if (configPath.contains("INVALID FEATURE RELATION ERROR")) {
                    StringBuilder errorMsg = new StringBuilder("Invalid relation given for feature.\r\n");
                    String[] errorParts;
                    errorParts = configPath.split(";");
                    errorMsg.append("Given: ").append(errorParts[1]);
                    errorMsg.append("\r\nFeature line number: ").append(errorParts[2]);
                    errorMsg.append("\r\nFeature number: f").append(errorParts[3]);
                    String errorData = generateError(errorMsg.toString());
                    logger.error(errorData);
                    return errorData;
                } else if (configPath.contains("INVALID FEATURE FIELD ERROR")) {
                    StringBuilder errorMsg = new StringBuilder("Invalid field given for feature.\r\n");
                    String[] errorParts;
                    errorParts = configPath.split(";");
                    errorMsg.append("Given: ").append(errorParts[1]);
                    errorMsg.append("\r\nFeature line number: ").append(errorParts[2]);
                    errorMsg.append("\r\nFeature number: f").append(errorParts[3]);
                    String errorData = generateError(errorMsg.toString());
                    logger.error(errorData);
                    return errorData;
                } else {
                    StringBuilder errorMsg = new StringBuilder(
                            "Unknown error found in configuration parameters.\r\n");
                    errorMsg.append("String returned: ").append(configPath);
                    String errorData = generateError(errorMsg.toString());
                    logger.error(errorData);
                    return errorData;
                }
            }

            // Call the method that converts the parameters to the format that they would
            // be in when given from command-line.
            params.append(configPath);
            String convertedParams = convertParameters(data, outputDirPath, inputDirPath).replace("\\", "/");
            if (convertedParams.contains("ERROR")) {
                if (convertedParams.contains("MODE ERROR")) {
                    StringBuilder errorMsg = new StringBuilder("Invalid mode parameter given.\r\n");
                    String[] errorParts;
                    errorParts = configPath.split(";");
                    errorMsg.append("Given: ").append(errorParts[1]);
                    String errorData = generateError(errorMsg.toString());
                    logger.error(errorData);
                    return errorData;
                }
            }
            params.append(convertedParams);
        }
        // Since we are only handling files created by the function, there should never be
        // a problem with these files. If there is, notify the user of the error.
        catch (IOException e) {
            String errorData = generateError("Error in handling of temporary files.");
            logger.error(errorData);
            return errorData;
        }

        String[] paramsArray;
        // Split the parameters into an array, which will be given as the args[] argument
        // to the main methods of RankLib.
        try {
            paramsArray = params.toString().split("\\s+");
        } catch (PatternSyntaxException ex) {
            String errorData = generateError("Error in parameter syntax.");
            logger.error(errorData);
            return errorData;
        }

        // Create a stream to hold the output from System.out.println. This is necessary
        // because when running, the program will print things from many RankLib classes and
        // methods. So the printed output will be "caught" and saved to output.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        PrintStream ps = new PrintStream(baos);

        // Save the old System.out PrintStream, to reset at the end of the program.
        PrintStream oldPrintStream = System.out;

        // Set the special stream as the out stream.
        System.setOut(ps);

        NLPTrain.main(paramsArray);

        // Set System.out back to the original PrintStream.
        System.out.flush();
        System.setOut(oldPrintStream);

        // Make a Map to hold both the printed and file outputs.
        Map<String, String> outputPayload = new HashMap<>();

        String finalPrint;
        if (data.getParameter("saveModel") != null) {
            StringBuilder toRemove = new StringBuilder(
                    "Name not implemented for OnlineComponent. Input name - ");
            toRemove.append(data.getParameter("saveModel")).append(".xz will be ignored.\r\n");
            finalPrint = baos.toString().replace(toRemove.toString(), "");
        } else {
            finalPrint = baos.toString();
        }

        // Add the printed text caught from the out stream to the payload with the "Printed" key.
        outputPayload.put("Printed", finalPrint);

        // Parse the Map to JSON, then put it as a payload to a Data object with a LAPPS
        // discriminator and return it as the final output.
        String outputJson = Serializer.toJson(outputPayload);
        Data<String> output = new Data<>(Discriminators.Uri.LAPPS, outputJson);
        return output.asPrettyJson();
    }
}
From source file:catalina.startup.Catalina.java
/**
 * Start a new server instance.
 */
protected void start() {

    // Create and execute our Digester
    Digester digester = createStartDigester();
    File file = configFile();
    try {
        InputSource is = new InputSource("file://" + file.getAbsolutePath());
        FileInputStream fis = new FileInputStream(file);
        is.setByteStream(fis);
        digester.push(this);
        digester.parse(is);
        fis.close();
    } catch (Exception e) {
        System.out.println("Catalina.start: " + e);
        e.printStackTrace(System.out);
        System.exit(1);
    }

    // Setting additional variables
    if (!useNaming) {
        System.setProperty("catalina.useNaming", "false");
    } else {
        System.setProperty("catalina.useNaming", "true");
        String value = "org.apache.naming";
        String oldValue = System.getProperty(javax.naming.Context.URL_PKG_PREFIXES);
        if (oldValue != null) {
            value = value + ":" + oldValue;
        }
        System.setProperty(javax.naming.Context.URL_PKG_PREFIXES, value);
        value = System.getProperty(javax.naming.Context.INITIAL_CONTEXT_FACTORY);
        if (value == null) {
            System.setProperty(javax.naming.Context.INITIAL_CONTEXT_FACTORY,
                    "org.apache.naming.java.javaURLContextFactory");
        }
    }

    // If a SecurityManager is being used, set properties for
    // checkPackageAccess() and checkPackageDefinition
    if (System.getSecurityManager() != null) {
        String access = Security.getProperty("package.access");
        if (access != null && access.length() > 0)
            access += ",";
        else
            access = "sun.,";
        Security.setProperty("package.access", access + "org.apache.catalina.,org.apache.jasper.");
        String definition = Security.getProperty("package.definition");
        if (definition != null && definition.length() > 0)
            definition += ",";
        else
            definition = "sun.,";
        Security.setProperty("package.definition",
                // FIX ME package "javax." was removed to prevent HotSpot
                // fatal internal errors
                definition + "java.,org.apache.catalina.,org.apache.jasper.");
    }

    // Replace System.out and System.err with a custom PrintStream
    SystemLogHandler log = new SystemLogHandler(System.out);
    System.setOut(log);
    System.setErr(log);

    Thread shutdownHook = new CatalinaShutdownHook();

    // Start the new server
    if (server instanceof Lifecycle) {
        try {
            server.initialize();
            ((Lifecycle) server).start();
            try {
                // Register shutdown hook
                Runtime.getRuntime().addShutdownHook(shutdownHook);
            } catch (Throwable t) {
                // This will fail on JDK 1.2. Ignoring, as Tomcat can run
                // fine without the shutdown hook.
            }
            // Wait for the server to be told to shut down
            server.await();
        } catch (LifecycleException e) {
            System.out.println("Catalina.start: " + e);
            e.printStackTrace(System.out);
            if (e.getThrowable() != null) {
                System.out.println("----- Root Cause -----");
                e.getThrowable().printStackTrace(System.out);
            }
        }
    }

    // Shut down the server
    if (server instanceof Lifecycle) {
        try {
            try {
                // Remove the ShutdownHook first so that server.stop()
                // doesn't get invoked twice
                Runtime.getRuntime().removeShutdownHook(shutdownHook);
            } catch (Throwable t) {
                // This will fail on JDK 1.2. Ignoring, as Tomcat can run
                // fine without the shutdown hook.
            }
            ((Lifecycle) server).stop();
        } catch (LifecycleException e) {
            System.out.println("Catalina.stop: " + e);
            e.printStackTrace(System.out);
            if (e.getThrowable() != null) {
                System.out.println("----- Root Cause -----");
                e.getThrowable().printStackTrace(System.out);
            }
        }
    }
}
From source file:com.github.tomakehurst.wiremock.StandaloneAcceptanceTest.java
private void startRecordingSystemOut() {
    out = new ByteArrayOutputStream();
    System.setOut(new PrintStream(out));
}
From source file:org.kchine.r.server.DirectJNI.java
public String runR(ExecutionUnit eu, HashMap<String, Object> clientProperties) {
    if (Thread.currentThread() == _rEngine) {
        throw new RuntimeException("runR called from within the R MainLoop Thread");
    } else {
        _mainLock.lock();
        _clientProperties = clientProperties;
        try {
            boolean hasConsoleInput = (eu.getConsoleInput() != null && !eu.getConsoleInput().equals(""));
            String consoleLog = null;
            _runRlock.lock();
            try {
                _sharedExecutionUnit = eu;
                try {
                    _availableCondition.await();
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            } finally {
                _runRlock.unlock();
            }
            consoleLog = _sharedBuffer.toString();
            if (hasConsoleInput && consoleLog.trim().equals("")) {
                System.setOut(new PrintStream(new OutputStream() {
                    public void write(final byte[] b) throws IOException {
                        _o.write(b);
                        if (new String(b).startsWith(_continueStr)) {
                            HashMap<String, Object> attrs = new HashMap<String, Object>();
                            attrs.put("log", _continueStr);
                            notifyRActionListeners(new RConsoleAction("APPEND_CONSOLE_CONTINUE", attrs));
                        }
                    }

                    public void write(final byte[] b, final int off, final int len) throws IOException {
                        _o.write(b, off, len);
                        if (new String(b, off, len).startsWith(_continueStr)) {
                            HashMap<String, Object> attrs = new HashMap<String, Object>();
                            attrs.put("log", _continueStr);
                            notifyRActionListeners(new RConsoleAction("APPEND_CONSOLE_CONTINUE", attrs));
                        }
                    }

                    public void write(final int b) throws IOException {
                        _o.write(b);
                    }
                }));
                _runRlock.lock();
                try {
                    _sharedExecutionUnit = new ExecutionUnit() {
                        public void run(Rengine e) {
                        }

                        public boolean emptyConsoleBufferBefore() {
                            return false;
                        }

                        public String getConsoleInput() {
                            return " ";
                        }
                    };
                    try {
                        _availableCondition.await();
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                } finally {
                    _runRlock.unlock();
                }
                System.setOut(_o);
            }
            return _sharedBuffer.toString();
        } finally {
            _clientProperties = null;
            _mainLock.unlock();
        }
    }
}
From source file:org.apache.hadoop.mapreduce.v2.hs.TestJobHistoryParsing.java
private void checkHistoryParsing(final int numMaps, final int numReduces, final int numSuccessfulMaps)
        throws Exception {
    Configuration conf = new Configuration();
    conf.set(MRJobConfig.USER_NAME, System.getProperty("user.name"));
    long amStartTimeEst = System.currentTimeMillis();
    conf.setClass(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class);
    RackResolver.init(conf);
    MRApp app = new MRAppWithHistory(numMaps, numReduces, true, this.getClass().getName(), true);
    app.submit(conf);
    Job job = app.getContext().getAllJobs().values().iterator().next();
    JobId jobId = job.getID();
    LOG.info("JOBID is " + TypeConverter.fromYarn(jobId).toString());
    app.waitForState(job, JobState.SUCCEEDED);

    // make sure all events are flushed
    app.waitForState(Service.STATE.STOPPED);

    String jobhistoryDir = JobHistoryUtils.getHistoryIntermediateDoneDirForUser(conf);

    FileContext fc = null;
    try {
        fc = FileContext.getFileContext(conf);
    } catch (IOException ioe) {
        LOG.info("Can not get FileContext", ioe);
        throw (new Exception("Can not get File Context"));
    }

    if (numMaps == numSuccessfulMaps) {
        String summaryFileName = JobHistoryUtils.getIntermediateSummaryFileName(jobId);
        Path summaryFile = new Path(jobhistoryDir, summaryFileName);
        String jobSummaryString = getJobSummary(fc, summaryFile);
        Assert.assertNotNull(jobSummaryString);
        Assert.assertTrue(jobSummaryString.contains("resourcesPerMap=100"));
        Assert.assertTrue(jobSummaryString.contains("resourcesPerReduce=100"));

        Map<String, String> jobSummaryElements = new HashMap<String, String>();
        StringTokenizer strToken = new StringTokenizer(jobSummaryString, ",");
        while (strToken.hasMoreTokens()) {
            String keypair = strToken.nextToken();
            jobSummaryElements.put(keypair.split("=")[0], keypair.split("=")[1]);
        }

        Assert.assertEquals("JobId does not match", jobId.toString(), jobSummaryElements.get("jobId"));
        Assert.assertEquals("JobName does not match", "test", jobSummaryElements.get("jobName"));
        Assert.assertTrue("submitTime should not be 0",
                Long.parseLong(jobSummaryElements.get("submitTime")) != 0);
        Assert.assertTrue("launchTime should not be 0",
                Long.parseLong(jobSummaryElements.get("launchTime")) != 0);
        Assert.assertTrue("firstMapTaskLaunchTime should not be 0",
                Long.parseLong(jobSummaryElements.get("firstMapTaskLaunchTime")) != 0);
        Assert.assertTrue("firstReduceTaskLaunchTime should not be 0",
                Long.parseLong(jobSummaryElements.get("firstReduceTaskLaunchTime")) != 0);
        Assert.assertTrue("finishTime should not be 0",
                Long.parseLong(jobSummaryElements.get("finishTime")) != 0);
        Assert.assertEquals("Mismatch in num map slots", numSuccessfulMaps,
                Integer.parseInt(jobSummaryElements.get("numMaps")));
        Assert.assertEquals("Mismatch in num reduce slots", numReduces,
                Integer.parseInt(jobSummaryElements.get("numReduces")));
        Assert.assertEquals("User does not match", System.getProperty("user.name"),
                jobSummaryElements.get("user"));
        Assert.assertEquals("Queue does not match", "default", jobSummaryElements.get("queue"));
        Assert.assertEquals("Status does not match", "SUCCEEDED", jobSummaryElements.get("status"));
    }

    JobHistory jobHistory = new JobHistory();
    jobHistory.init(conf);
    HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId);
    JobInfo jobInfo;
    long numFinishedMaps;

    synchronized (fileInfo) {
        Path historyFilePath = fileInfo.getHistoryFile();
        FSDataInputStream in = null;
        LOG.info("JobHistoryFile is: " + historyFilePath);
        try {
            in = fc.open(fc.makeQualified(historyFilePath));
        } catch (IOException ioe) {
            LOG.info("Can not open history file: " + historyFilePath, ioe);
            throw (new Exception("Can not open History File"));
        }

        JobHistoryParser parser = new JobHistoryParser(in);
        final EventReader realReader = new EventReader(in);
        EventReader reader = Mockito.mock(EventReader.class);
        if (numMaps == numSuccessfulMaps) {
            reader = realReader;
        } else {
            final AtomicInteger numFinishedEvents = new AtomicInteger(0); // Hack!
            Mockito.when(reader.getNextEvent()).thenAnswer(new Answer<HistoryEvent>() {
                public HistoryEvent answer(InvocationOnMock invocation) throws IOException {
                    HistoryEvent event = realReader.getNextEvent();
                    if (event instanceof TaskFinishedEvent) {
                        numFinishedEvents.incrementAndGet();
                    }
                    if (numFinishedEvents.get() <= numSuccessfulMaps) {
                        return event;
                    } else {
                        throw new IOException("test");
                    }
                }
            });
        }

        jobInfo = parser.parse(reader);
        numFinishedMaps = computeFinishedMaps(jobInfo, numMaps, numSuccessfulMaps);
        if (numFinishedMaps != numMaps) {
            Exception parseException = parser.getParseException();
            Assert.assertNotNull("Didn't get expected parse exception", parseException);
        }
    }

    Assert.assertEquals("Incorrect username ", System.getProperty("user.name"), jobInfo.getUsername());
    Assert.assertEquals("Incorrect jobName ", "test", jobInfo.getJobname());
    Assert.assertEquals("Incorrect queuename ", "default", jobInfo.getJobQueueName());
    Assert.assertEquals("incorrect conf path", "test", jobInfo.getJobConfPath());
    Assert.assertEquals("incorrect finishedMap ", numSuccessfulMaps, numFinishedMaps);
    Assert.assertEquals("incorrect finishedReduces ", numReduces, jobInfo.getFinishedReduces());
    Assert.assertEquals("incorrect uberized ", job.isUber(), jobInfo.getUberized());

    Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks();
    int totalTasks = allTasks.size();
    Assert.assertEquals("total number of tasks is incorrect ", (numMaps + numReduces), totalTasks);

    // Verify aminfo
    Assert.assertEquals(1, jobInfo.getAMInfos().size());
    Assert.assertEquals(MRApp.NM_HOST, jobInfo.getAMInfos().get(0).getNodeManagerHost());
    AMInfo amInfo = jobInfo.getAMInfos().get(0);
    Assert.assertEquals(MRApp.NM_PORT, amInfo.getNodeManagerPort());
    Assert.assertEquals(MRApp.NM_HTTP_PORT, amInfo.getNodeManagerHttpPort());
    Assert.assertEquals(1, amInfo.getAppAttemptId().getAttemptId());
    Assert.assertEquals(amInfo.getAppAttemptId(), amInfo.getContainerId().getApplicationAttemptId());
    Assert.assertTrue(
            amInfo.getStartTime() <= System.currentTimeMillis() && amInfo.getStartTime() >= amStartTimeEst);

    ContainerId fakeCid = MRApp.newContainerId(-1, -1, -1, -1);
    // Assert at taskAttempt level
    for (TaskInfo taskInfo : allTasks.values()) {
        int taskAttemptCount = taskInfo.getAllTaskAttempts().size();
        Assert.assertEquals("total number of task attempts ", 1, taskAttemptCount);
        TaskAttemptInfo taInfo = taskInfo.getAllTaskAttempts().values().iterator().next();
        Assert.assertNotNull(taInfo.getContainerId());
        // Verify the wrong ctor is not being used. Remove after mrv1 is removed.
        Assert.assertFalse(taInfo.getContainerId().equals(fakeCid));
    }

    // Deep compare Job and JobInfo
    for (Task task : job.getTasks().values()) {
        TaskInfo taskInfo = allTasks.get(TypeConverter.fromYarn(task.getID()));
        Assert.assertNotNull("TaskInfo not found", taskInfo);
        for (TaskAttempt taskAttempt : task.getAttempts().values()) {
            TaskAttemptInfo taskAttemptInfo = taskInfo.getAllTaskAttempts()
                    .get(TypeConverter.fromYarn((taskAttempt.getID())));
            Assert.assertNotNull("TaskAttemptInfo not found", taskAttemptInfo);
            Assert.assertEquals("Incorrect shuffle port for task attempt", taskAttempt.getShufflePort(),
                    taskAttemptInfo.getShufflePort());
            if (numMaps == numSuccessfulMaps) {
                Assert.assertEquals(MRApp.NM_HOST, taskAttemptInfo.getHostname());
                Assert.assertEquals(MRApp.NM_PORT, taskAttemptInfo.getPort());
                // Verify rack-name
                Assert.assertEquals("rack-name is incorrect", taskAttemptInfo.getRackname(), RACK_NAME);
            }
        }
    }

    // test output for HistoryViewer
    PrintStream stdps = System.out;
    try {
        System.setOut(new PrintStream(outContent));
        HistoryViewer viewer;
        synchronized (fileInfo) {
            viewer = new HistoryViewer(fc.makeQualified(fileInfo.getHistoryFile()).toString(), conf, true);
        }
        viewer.print();

        for (TaskInfo taskInfo : allTasks.values()) {
            String test = (taskInfo.getTaskStatus() == null ? "" : taskInfo.getTaskStatus()) + " "
                    + taskInfo.getTaskType() + " task list for " + taskInfo.getTaskId().getJobID();
            Assert.assertTrue(outContent.toString().indexOf(test) > 0);
            Assert.assertTrue(outContent.toString().indexOf(taskInfo.getTaskId().toString()) > 0);
        }
    } finally {
        System.setOut(stdps);
    }
}
From source file:org.kie.workbench.common.services.backend.compiler.external339.AFMavenCli.java
protected void logging(AFCliRequest cliRequest) {
    cliRequest.setDebug(cliRequest.getCommandLine().hasOption(CLIManager.DEBUG));
    cliRequest.setQuiet(!cliRequest.isDebug() && cliRequest.getCommandLine().hasOption(CLIManager.QUIET));
    cliRequest.setShowErrors(cliRequest.isDebug() || cliRequest.getCommandLine().hasOption(CLIManager.ERRORS));

    slf4jLoggerFactory = LoggerFactory.getILoggerFactory();
    Slf4jConfiguration slf4jConfiguration = Slf4jConfigurationFactory.getConfiguration(slf4jLoggerFactory);

    if (cliRequest.isDebug()) {
        cliRequest.getRequest().setLoggingLevel(MavenExecutionRequest.LOGGING_LEVEL_DEBUG);
        slf4jConfiguration.setRootLoggerLevel(Slf4jConfiguration.Level.DEBUG);
    } else if (cliRequest.isQuiet()) {
        cliRequest.getRequest().setLoggingLevel(MavenExecutionRequest.LOGGING_LEVEL_ERROR);
        slf4jConfiguration.setRootLoggerLevel(Slf4jConfiguration.Level.ERROR);
    }

    if (cliRequest.getCommandLine().hasOption(CLIManager.LOG_FILE)) {
        File logFile = new File(cliRequest.getCommandLine().getOptionValue(CLIManager.LOG_FILE).trim());
        logFile = resolveFile(logFile, cliRequest.getWorkingDirectory());

        //@MAX
        try {
            PrintStream ps = new PrintStream(new FileOutputStream(logFile));
            System.setOut(ps);
            System.setErr(ps);
        } catch (FileNotFoundException e) {
            logger.error(e.getMessage());
        }
    }

    slf4jConfiguration.activate();
    plexusLoggerManager = new Slf4jLoggerManager();
    slf4jLogger = slf4jLoggerFactory.getLogger(this.getClass().getName());
}
From source file:hudson.remoting.Launcher.java
private void runWithStdinStdout() throws IOException, InterruptedException {
    // use stdin/stdout for channel communication
    ttyCheck();
    if (isWindows()) {
        /*
           To prevent the dead lock between GetFileType from _ioinit in C runtime and blocking read
           that ChannelReaderThread would do on stdin, load the crypto DLL first.

           This is a band-aid solution to the problem. Still searching for more fundamental fix.

           02f1e750 7c90d99a ntdll!KiFastSystemCallRet
           02f1e754 7c810f63 ntdll!NtQueryVolumeInformationFile+0xc
           02f1e784 77c2c9f9 kernel32!GetFileType+0x7e
           02f1e7e8 77c1f01d msvcrt!_ioinit+0x19f
           02f1e88c 7c90118a msvcrt!__CRTDLL_INIT+0xac
           02f1e8ac 7c91c4fa ntdll!LdrpCallInitRoutine+0x14
           02f1e9b4 7c916371 ntdll!LdrpRunInitializeRoutines+0x344
           02f1ec60 7c9164d3 ntdll!LdrpLoadDll+0x3e5
           02f1ef08 7c801bbd ntdll!LdrLoadDll+0x230
           02f1ef70 7c801d72 kernel32!LoadLibraryExW+0x18e
           02f1ef84 7c801da8 kernel32!LoadLibraryExA+0x1f
           02f1efa0 77de8830 kernel32!LoadLibraryA+0x94
           02f1f05c 6d3eb1be ADVAPI32!CryptAcquireContextA+0x512
           WARNING: Stack unwind information not available. Following frames may be wrong.
           02f1f13c 6d99c844 java_6d3e0000!Java_sun_security_provider_NativeSeedGenerator_nativeGenerateSeed+0x6e

           see http://weblogs.java.net/blog/kohsuke/archive/2009/09/28/reading-stdin-may-cause-your-jvm-hang
           for more details
         */
        new SecureRandom().nextBoolean();
    }

    // this will prevent programs from accidentally writing to System.out
    // and messing up the stream.
    OutputStream os = System.out;
    System.setOut(System.err);
    main(System.in, os, mode, ping);
}