List of usage examples for java.io.OutputStream.toString()
public String toString()
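Note that java.io.OutputStream does not override toString() itself, so on an arbitrary stream the call falls through to Object.toString() and returns only a type-and-hash identifier. Almost every example below therefore points the OutputStream reference at a ByteArrayOutputStream, whose override decodes the buffered bytes into a String using the platform default charset (the ch.cyberduck.core.Path example, which merely logs the stream for debugging, is the exception). A minimal, self-contained sketch of the pattern:

    import java.io.ByteArrayOutputStream;
    import java.io.OutputStream;
    import java.nio.charset.StandardCharsets;

    public class OutputStreamToStringDemo {
        public static void main(String[] args) throws Exception {
            // The static type is OutputStream, but dynamic dispatch still reaches
            // ByteArrayOutputStream.toString(), which decodes the internal buffer.
            OutputStream out = new ByteArrayOutputStream();
            out.write("hello".getBytes(StandardCharsets.UTF_8));

            System.out.println(out.toString()); // decoded with the platform default charset
            // Prefer an explicit charset when the encoding matters:
            System.out.println(((ByteArrayOutputStream) out).toString("UTF-8"));
        }
    }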
From source file: org.sdm.spa.actors.transport.SrmliteCopier.java
protected int fileListCopyFrom(String cmdWithPath, String HostStr) throws Exception {
    int exitCode = 0;
    LocalExec localObject = new LocalExec();
    localObject.setTimeout(timeout, false, false);
    OutputStream streamOut = new ByteArrayOutputStream();
    OutputStream streamErr = new ByteArrayOutputStream();
    exitCode = localObject.executeCmd(cmdWithPath, streamOut, streamErr, HostStr);
    if (isDebugging) {
        log.error("Output on stdout:" + streamOut);
        log.error("Output on stderr:" + streamErr);
    }
    String message = streamErr.toString();
    message = message + " \n\n" + streamOut.toString();
    log.debug(message);
    return exitCode;
}
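The example above decodes both buffers with the platform default charset. When the encoding of the captured command output is known, the charset-taking overload is safer; a minimal sketch (the Process argument stands in for whatever produced the output, and capture is a hypothetical helper name):

    import java.io.ByteArrayOutputStream;
    import java.nio.charset.StandardCharsets;

    class CommandOutputCapture {
        // Hypothetical helper: drain a process's stdout into a buffer and
        // decode it with an explicit charset instead of the platform default.
        static String capture(Process process) throws Exception {
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            process.getInputStream().transferTo(buffer); // InputStream.transferTo, Java 9+
            return buffer.toString(StandardCharsets.UTF_8.name());
        }
    }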
From source file: me.cavar.pg2tei.TEIDoc.java
/**
 * @return a newline-separated summary of the document's titles and metadata
 */
public String getPrettyPrint() {
    OutputStream bOut = new ByteArrayOutputStream();
    String ret = "";
    try {
        for (String tmptitle : this.title) {
            bOut.write(tmptitle.getBytes());
            bOut.write("\n".getBytes());
        }
        bOut.write(this.friendlyTitle.getBytes());
        bOut.write("\n".getBytes());
        bOut.write(this.creator.getBytes());
        bOut.write("\n".getBytes());
        bOut.write(this.publisher.getBytes());
        bOut.write("\n".getBytes());
        bOut.write(this.description.getBytes());
        bOut.write("\n".getBytes());
        ret = bOut.toString();
        bOut.close();
    } catch (IOException e) {
        Logger.getLogger(TEIDoc.class.getName()).log(Level.SEVERE, null, e);
    }
    return ret;
}
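Writing String data out as bytes only to decode them back with toString() is an encode/decode round trip through the platform default charset. A StringBuilder yields the same text directly; a sketch of an equivalent method, assuming the same fields as the class above:

    public String getPrettyPrint() {
        // Same output as the stream-based version, without the byte round trip.
        StringBuilder sb = new StringBuilder();
        for (String tmptitle : this.title) {
            sb.append(tmptitle).append('\n');
        }
        sb.append(this.friendlyTitle).append('\n');
        sb.append(this.creator).append('\n');
        sb.append(this.publisher).append('\n');
        sb.append(this.description).append('\n');
        return sb.toString();
    }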
From source file: grakn.core.console.test.GraknConsoleIT.java
private Response runConsoleSession(String input, String... args) {
    args = addKeyspaceAndUriParams(args);

    OutputStream bufferOut = new ByteArrayOutputStream();
    OutputStream bufferErr = new ByteArrayOutputStream();

    PrintStream printOut = new PrintStream(new TeeOutputStream(bufferOut, System.out));
    PrintStream printErr = new PrintStream(new TeeOutputStream(bufferErr, System.err));

    try {
        System.setIn(new ByteArrayInputStream(input.getBytes()));
        GraknConsole console = new GraknConsole(args, printOut, printErr);
        console.run();
    } catch (Exception e) {
        printErr.println(e.getMessage());
        printErr.flush();
    } finally {
        resetIO();
    }

    printOut.flush();
    printErr.flush();

    return Response.of(bufferOut.toString(), bufferErr.toString());
}
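TeeOutputStream here is Apache Commons IO's splitter: every byte written goes to both sinks, so the console shows live output while the buffers accumulate it for the returned Response. A stripped-down sketch of the same capture-and-mirror idea (assumes commons-io on the classpath):

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;
    import org.apache.commons.io.output.TeeOutputStream;

    public class TeeCaptureSketch {
        public static void main(String[] args) {
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            PrintStream original = System.out;
            // Mirror System.out: everything printed also lands in the buffer.
            System.setOut(new PrintStream(new TeeOutputStream(buffer, original), true));
            try {
                System.out.println("visible on the console and captured");
            } finally {
                System.setOut(original); // always restore the real stream
            }
            original.println("captured: " + buffer.toString());
        }
    }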
From source file: net.itransformers.idiscover.v2.core.discovererIntegrationTest.metroE.IntegrationTestsMetroE.java
@Test
public void testR112() {
    Map<String, String> resourceParams = new HashMap<String, String>();

    FileInputStream is = null;
    try {
        is = new FileInputStream(
                "iDiscover/netDiscoverer/src/test/resources/raw-data-metroE/raw-data-R-112.xml");
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    }

    byte[] data = null;
    try {
        data = new byte[is.available()];
        is.read(data);
    } catch (IOException e) {
        e.printStackTrace();
    }

    rawDeviceData.setData(data);

    DiscoveryHelper discoveryHelper = discoveryHelperFactory.createDiscoveryHelper("CISCO");
    resourceParams.put("neighbourIPDryRun", "true");
    DiscoveredDeviceData discoveredDeviceData =
            discoveryHelper.parseDeviceRawData(rawDeviceData, discoveryTypes, resourceParams);

    Map<String, HashMap<String, String>> discoveredDevices = new HashMap<String, HashMap<String, String>>();

    HashMap<String, String> s11218 = new HashMap<String, String>();
    s11218.put("snmp", "S-112-18");
    s11218.put("deviceType", "CISCO");
    discoveredDevices.put("10.32.249.87", s11218);

    HashMap<String, String> s11227 = new HashMap<String, String>();
    s11227.put("snmp", "S-112-27");
    s11227.put("deviceType", "CISCO");
    discoveredDevices.put("10.32.249.119", s11227);

    HashMap<String, String> s1120 = new HashMap<String, String>();
    s1120.put("snmp", "S-112-0");
    s1120.put("deviceType", "CISCO");
    discoveredDevices.put("10.32.250.51", s1120);

    // S-112-3
    HashMap<String, String> s1123 = new HashMap<String, String>();
    s1123.put("snmp", "S-112-3");
    s1123.put("deviceType", "CISCO");
    discoveredDevices.put("10.32.250.56", s1123);

    // M-321
    HashMap<String, String> m321 = new HashMap<String, String>();
    m321.put("snmp", "M-321");
    m321.put("deviceType", "CISCO");
    discoveredDevices.put("10.32.219.53", m321);

    // 172.16.2.98
    HashMap<String, String> n17216298 = new HashMap<String, String>();
    n17216298.put("snmp", "");
    n17216298.put("deviceType", "UNKNOWN");
    discoveredDevices.put("172.16.2.98", n17216298);

    SnmpForXslt.setDiscoveredIPs(discoveredDevices);
    resourceParams.put("neighbourIPDryRun", "false");

    OutputStream os = null;
    discoveredDeviceData = discoveryHelper.parseDeviceRawData(rawDeviceData, discoveryTypes, resourceParams);
    try {
        os = new ByteArrayOutputStream();
        JaxbMarshalar.marshal(discoveredDeviceData, os, "DiscoveredDevice");
        String str = os.toString();
        System.out.println(str);
    } catch (JAXBException e) {
        e.printStackTrace();
    } finally {
        if (os != null) {
            try {
                os.close();
            } catch (IOException e) {
            }
        }
    }

    Map<String, Integer> neighbourTypeCounts = fillInNeighbourTree(discoveredDeviceData.getObject());
    System.out.println(neighbourTypeCounts);

    Assert.assertEquals((Object) 7, neighbourTypeCounts.get("CDP"));
    Assert.assertEquals((Object) 6, neighbourTypeCounts.get("Slash30"));
    Assert.assertEquals((Object) 7, neighbourTypeCounts.get("c_OSPF"));
    Assert.assertEquals((Object) 1, neighbourTypeCounts.get("Slash31"));
    Assert.assertEquals((Object) 16, neighbourTypeCounts.get("UNKNOWN"));
    Assert.assertEquals((Object) 5, neighbourTypeCounts.get("CISCO"));
}
From source file: org.apache.hive.beeline.TestSchemaTool.java
/**
 * Test schema upgrade
 * @throws Exception
 */
public void testSchemaUpgrade() throws Exception {
    boolean foundException = false;

    // Initialize 0.7.0 schema
    schemaTool.doInit("0.7.0");

    // verify that driver fails due to older version schema
    try {
        schemaTool.verifySchemaVersion();
    } catch (HiveMetaException e) {
        // Expected to fail due to old schema
        foundException = true;
    }
    if (!foundException) {
        throw new Exception("Hive operations shouldn't pass with older version schema");
    }

    // Generate dummy pre-upgrade script with errors
    String invalidPreUpgradeScript = writeDummyPreUpgradeScript(0, "upgrade-0.11.0-to-0.12.0.derby.sql",
            "foo bar;");
    // Generate dummy pre-upgrade scripts with valid SQL
    String validPreUpgradeScript0 = writeDummyPreUpgradeScript(0, "upgrade-0.12.0-to-0.13.0.derby.sql",
            "CREATE TABLE schema_test0 (id integer);");
    String validPreUpgradeScript1 = writeDummyPreUpgradeScript(1, "upgrade-0.12.0-to-0.13.0.derby.sql",
            "CREATE TABLE schema_test1 (id integer);");

    // Capture system out and err
    schemaTool.setVerbose(true);
    OutputStream stderr = new ByteArrayOutputStream();
    PrintStream errPrintStream = new PrintStream(stderr);
    System.setErr(errPrintStream);
    OutputStream stdout = new ByteArrayOutputStream();
    PrintStream outPrintStream = new PrintStream(stdout);
    System.setOut(outPrintStream);

    // Upgrade schema from 0.7.0 to latest
    schemaTool.doUpgrade("0.7.0");

    // Verify that the schemaTool ran pre-upgrade scripts and ignored errors
    assertTrue(stderr.toString().contains(invalidPreUpgradeScript));
    assertTrue(stderr.toString().contains("foo"));
    assertFalse(stderr.toString().contains(validPreUpgradeScript0));
    assertFalse(stderr.toString().contains(validPreUpgradeScript1));
    assertTrue(stdout.toString().contains(validPreUpgradeScript0));
    assertTrue(stdout.toString().contains(validPreUpgradeScript1));

    // Verify that driver works fine with latest schema
    schemaTool.verifySchemaVersion();
}
From source file: com.vmware.vhadoop.adaptor.hadoop.HadoopAdaptor.java
@Override
public CompoundStatus checkTargetTTsSuccess(String opType, String[] affectedTTs, int totalTargetEnabled,
        HadoopCluster cluster) {
    CompoundStatus status = new CompoundStatus("checkTargetTTsSuccess");

    String scriptFileName = CHECK_SCRIPT_FILE_NAME;
    String scriptRemoteFilePath = DEFAULT_SCRIPT_DEST_PATH + scriptFileName;
    String listRemoteFilePath = null;
    String opDesc = "checkTargetTTsSuccess";

    _log.log(Level.INFO, "AffectedTTs:");
    for (String tt : affectedTTs) {
        _log.log(Level.INFO, tt);
    }

    HadoopConnection connection = getConnectionForCluster(cluster);
    setErrorParamsForCommand(opDesc, scriptRemoteFilePath, listRemoteFilePath);

    int rc = -1;
    int iterations = 0;
    do {
        if (iterations > 0) {
            _log.log(Level.INFO, "Target TTs not yet achieved...checking again - " + iterations);
        }

        OutputStream out = new ByteArrayOutputStream();
        rc = executeScriptWithCopyRetryOnFailure(connection, scriptFileName,
                new String[] { "" + totalTargetEnabled, connection.getHadoopHome() }, out);
        try {
            out.flush();
        } catch (IOException e) {
            String errorMsg = "Unexpected exception in SSH OutputStream ";
            _log.log(Level.WARNING, errorMsg, e);
            status.registerTaskFailed(false, errorMsg + e.getMessage());
        }

        /* Convert to String array and "nullify" last element (which happens to be "@@@..." or empty line) */
        String[] allActiveTTs = out.toString().split("\n");
        allActiveTTs[allActiveTTs.length - 1] = null;

        if (checkOpSuccess(opType, affectedTTs, allActiveTTs)) {
            _log.log(Level.INFO, "All selected TTs correctly %sed", opType.toLowerCase());
            rc = SUCCESS;
            break;
        }
        // TODO: out.close()?
    } while ((rc == ERROR_FEWER_TTS || rc == ERROR_EXCESS_TTS) && (++iterations <= MAX_CHECK_RETRY_ITERATIONS));

    status.addStatus(_errorCodes.interpretErrorCode(_log, rc, _errorParamValues));
    return status;
}
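Note the manual nulling of the last element after splitting the captured output: String.split drops trailing empty strings by default, so only a non-empty sentinel line survives as a final element, while a negative limit would keep the trailing empty as well. A tiny sketch of the split behavior the code is compensating for:

    public class SplitSketch {
        public static void main(String[] args) {
            String captured = "tt1\ntt2\n";
            // Default split drops trailing empty strings...
            System.out.println(captured.split("\n").length);     // 2
            // ...a negative limit keeps them.
            System.out.println(captured.split("\n", -1).length); // 3
        }
    }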
From source file: org.talend.dataprep.transformation.api.transformer.TransformerFactoryTest.java
@Test
public void getExporter_csv_exporter_should_write_csv_format() throws Exception {
    // given
    Map<String, String> arguments = new HashMap<>();
    arguments.put(ExportFormat.PREFIX + "csvSeparator", ";");
    final OutputStream outputStream = new ByteArrayOutputStream();
    final Configuration configuration = Configuration.builder() //
            .args(arguments) //
            .format(CSV) //
            .output(outputStream) //
            .actions(IOUtils.toString(TransformerFactoryTest.class.getResourceAsStream("upper_case_firstname.json"))) //
            .build();
    final Transformer transformer = factory.get(configuration);

    final String expectedCsv = IOUtils.toString(TransformerFactoryTest.class
            .getResourceAsStream("expected_export_preparation_uppercase_firstname.csv"));

    final InputStream inputStream = TransformerFactoryTest.class
            .getResourceAsStream("../../format/export_dataset.json");
    try (JsonParser parser = mapper.getFactory().createParser(inputStream)) {
        final DataSet dataSet = mapper.readerFor(DataSet.class).readValue(parser);

        // when
        transformer.transform(dataSet, configuration);

        // then
        assertThat(outputStream.toString()).isEqualTo(expectedCsv);
    }
}
From source file: org.meresco.triplestore.TransactionLogTest.java
@Test
public void testCorruptedCurrentFileInTransactionLog() throws Exception {
    String currentData = "<transaction_item>\n" + " <action>add</action>\n"
            + " <identifier>test1.rdf</identifier>\n" + " <filedata>ignored</filedata>\n"
            + "<transaction_item>\n" + " <action>add</action>\n"
            + " <identifier>test2.rdf</identifier>\n" + " <filedata>ignored</filedata>\n"
            + "</transaction_item>\n";
    Utils.write(transactionLog.transactionLogFilePath, currentData);

    OutputStream os = new ByteArrayOutputStream();
    PrintStream ps = new PrintStream(os);
    PrintStream err = System.err;
    System.setErr(ps);
    try {
        transactionLog.recoverTripleStore();
        fail("Should fail");
    } catch (TransactionLogException e) {
        assertEquals("Corrupted transaction_item in "
                + transactionLog.transactionLogFilePath.getAbsolutePath()
                + " at line 9. This should never occur.", e.getMessage());
        assertTrue(os.toString(),
                os.toString().contains("XML document structures must start and end within the same entity."));
    } finally {
        System.setErr(err);
    }
    assertEquals(0, tsMock.actions.size());
}
From source file: ch.cyberduck.core.Path.java
/**
 * Will copy from in to out. Does not attempt to skip any bytes from the streams.
 *
 * @param in       The stream to read from
 * @param out      The stream to write to
 * @param throttle The bandwidth limit
 * @param l        The stream listener to notify about bytes received and sent
 * @param status   Transfer status
 * @throws IOException                 Write not completed due to an I/O problem
 * @throws ConnectionCanceledException When transfer is interrupted by user setting the
 *                                     status flag to cancel.
 */
protected void download(final InputStream in, final OutputStream out, final BandwidthThrottle throttle,
        final StreamListener l, final TransferStatus status) throws IOException {
    if (log.isDebugEnabled()) {
        log.debug("download(" + in.toString() + ", " + out.toString());
    }
    this.getSession()
            .message(MessageFormat.format(Locale.localizedString("Downloading {0}", "Status"), this.getName()));
    this.transfer(new ThrottledInputStream(in, throttle), out, l, -1, status);
}
From source file: org.craftercms.studio.impl.v1.deployment.EnvironmentStoreGitDeployer.java
private InputStream createPatch(Repository repository, String site, String path) {
    try (Git git = new Git(repository)) {
        // the diff works on TreeIterators, we prepare two for the two branches
        AbstractTreeIterator oldTreeParser = prepareTreeParser(repository, "refs/heads/master");
        AbstractTreeIterator newTreeParser = prepareTreeParser(repository, "FETCH_HEAD");

        // then the porcelain diff-command returns a list of diff entries
        List<DiffEntry> diff = git.diff().setOldTree(oldTreeParser).setNewTree(newTreeParser)
                .setPathFilter(getTreeFilter(path)).call();

        OutputStream out = new ByteArrayOutputStream();
        for (DiffEntry diffEntry : diff) {
            DiffFormatter df = new DiffFormatter(out);
            FileHeader fh = df.toFileHeader(diffEntry);
            if (fh.getPatchType().equals(FileHeader.PatchType.BINARY)) {
                logger.error("Binary content detected in diff entry: " + diffEntry.getNewPath());
            }
        }

        DiffFormatter df = new DiffFormatter(out);
        df.setRepository(repository);
        df.setPathFilter(getTreeFilter(path));
        df.setAbbreviationLength(OBJECT_ID_STRING_LENGTH);
        df.format(diff);
        df.flush();
        df.close();

        String content = out.toString();
        logger.debug(content);

        InputStream in = IOUtils.toInputStream(content);
        return in;
    } catch (GitAPIException | IOException e) {
        logger.error("Error while creating patch for site: " + site + " path: " + path, e);
    }
    return null;
}
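The charset-less IOUtils.toInputStream(String) used above relies on the platform default encoding and is deprecated in recent Commons IO releases in favor of the overload that takes a Charset. A sketch of the buffer-to-stream round trip with the encoding pinned on both sides (assumes Commons IO 2.3+ for the overload and Java 10+ for ByteArrayOutputStream.toString(Charset)):

    import java.io.ByteArrayOutputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import org.apache.commons.io.IOUtils;

    class PatchRoundTrip {
        // Buffer -> String -> InputStream with the charset made explicit.
        static InputStream toStream(ByteArrayOutputStream out) {
            String content = out.toString(StandardCharsets.UTF_8); // Java 10+
            return IOUtils.toInputStream(content, StandardCharsets.UTF_8);
        }
    }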