List of usage examples for the java.io.PipedInputStream constructor

public PipedInputStream(int pipeSize)
    Creates a PipedInputStream so that it is not yet connected (see connect(java.io.PipedOutputStream)) and uses the specified pipe size for the pipe's buffer.
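
Before the project examples, here is a minimal, self-contained sketch of this constructor in use (not taken from any of the projects below): the stream is created unconnected with an enlarged buffer, explicitly connected to a PipedOutputStream, and then read while a second thread writes.

import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

public class PipeSizeExample {
    public static void main(String[] args) throws Exception {
        // Unconnected stream with a 64 KB buffer instead of the 1 KB default.
        PipedInputStream in = new PipedInputStream(64 * 1024);
        PipedOutputStream out = new PipedOutputStream();
        in.connect(out); // equivalent to out.connect(in)

        // Pipes are meant for two threads: one writes, the other reads.
        Thread producer = new Thread(() -> {
            try {
                out.write("hello pipe".getBytes("UTF-8"));
                out.close(); // signals end-of-stream to the reader
            } catch (IOException e) {
                e.printStackTrace();
            }
        });
        producer.start();

        int b;
        while ((b = in.read()) != -1) {
            System.out.print((char) b);
        }
        in.close();
        producer.join();
    }
}
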
From source file:com.github.brandtg.switchboard.FileLogAggregator.java

/**
 * An agent that aggregates logs from multiple sources and multiplexes them.
 *
 * @param sources      A set of source switchboard servers from which to pull logs
 * @param separator    The line delimiter (after this is reached, lines will be output to outputStream)
 * @param outputStream OutputStream to which all multiplexed logs are piped
 */
public FileLogAggregator(Set<InetSocketAddress> sources, char separator, OutputStream outputStream)
        throws IOException {
    this.separator = separator;
    this.outputStream = outputStream;
    this.isShutdown = new AtomicBoolean(true);
    this.eventExecutors = new NioEventLoopGroup();
    this.logReceivers = new HashMap<>();
    this.inputStreams = new HashMap<>();
    this.listener = new Object();
    this.muxExecutor = Executors.newSingleThreadExecutor();
    this.stringBuffers = new HashMap<>();
    for (InetSocketAddress source : sources) {
        PipedOutputStream pos = new PipedOutputStream();
        PipedInputStream pis = new PipedInputStream(pos);
        LogReceiver logReceiver = new LogReceiver(new InetSocketAddress(0), eventExecutors, pos);
        logReceiver.registerListener(listener);
        logReceivers.put(source, logReceiver);
        inputStreams.put(source, pis);
        stringBuffers.put(source, new StringBuffer());
    }
}

From source file:com.flexive.core.storage.genericSQL.GenericBinarySQLOutputStream.java
/**
 * Ctor
 *
 * @param divisionId   division
 * @param handle       binary handle
 * @param mimeType     mime type
 * @param expectedSize expected size of the binary
 * @param ttl          time to live in the transit space
 * @throws IOException  on errors
 * @throws SQLException if no connection could be obtained
 */
GenericBinarySQLOutputStream(int divisionId, String handle, String mimeType, long expectedSize, long ttl)
        throws IOException, SQLException {
    this.divisionId = divisionId;
    this.handle = handle;
    this.mimeType = mimeType;
    this.expectedSize = expectedSize;
    this.ttl = ttl;
    this.pipe = new PipedInputStream(this);
    this.count = 0L;
    this.rcvThread = new Thread(this);
    this.rcvThread.setDaemon(true);
    this.rcvThread.start();
}

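The constructor above hands this to new PipedInputStream(this), which only works because the enclosing class both extends PipedOutputStream and implements Runnable. A stripped-down, hedged sketch of that self-piping pattern (the class and field names here are illustrative, not taken from the flexive sources):

import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

// Illustrative sketch: the stream is its own PipedOutputStream, and a daemon
// thread drains the connected PipedInputStream in the background.
class SelfDrainingOutputStream extends PipedOutputStream implements Runnable {
    private final PipedInputStream pipe;
    private final Thread rcvThread;

    SelfDrainingOutputStream() throws IOException {
        this.pipe = new PipedInputStream(this);   // connect the read end to this output stream
        this.rcvThread = new Thread(this);
        this.rcvThread.setDaemon(true);
        this.rcvThread.start();                   // consumer runs in the background
    }

    @Override
    public void run() {
        byte[] buf = new byte[4096];
        int n;
        try {
            while ((n = pipe.read(buf)) != -1) {
                // e.g. stream the bytes into a database BLOB, as the original class does
            }
        } catch (IOException e) {
            // pipe closed or broken; nothing more to consume
        }
    }
}
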
From source file:org.sipfoundry.sipxconfig.cfgmgt.AgentRunner.java
/**
 * Run a full job at a location. Update the job table for any failures in either running the
 * command or errors in stderr.
 */
void runJob(Location location, String label, String command) {
    PipedOutputStream log = null;
    Serializable job = m_jobContext.schedule(label, location);
    AgentResults results = new AgentResults();
    Stack<String> errs;
    try {
        m_jobContext.start(job);
        log = new PipedOutputStream();
        PipedInputStream in = new PipedInputStream(log);
        results.parse(in);
        int status = runCommand(command, log);
        errs = results.getResults(1000);
        if (errs.size() > 0) {
            String err = location.getFqdn() + ':' + errs.pop();
            ConfigManagerImpl.fail(m_jobContext, label, job, new ConfigException(err));
            while (!errs.empty()) {
                // Tricky alert - show additional errors as new jobs
                Serializable jobErr = m_jobContext.schedule(label, location);
                m_jobContext.start(jobErr);
                err = location.getFqdn() + ':' + errs.pop();
                ConfigManagerImpl.fail(m_jobContext, label, jobErr, new ConfigException(err));
            }
        } else if (status != 0 && errs.size() == 0) {
            String msg = format("Agent run on %s finished but returned error code %d", location.getFqdn(),
                    status);
            ConfigManagerImpl.fail(m_jobContext, label, job, new ConfigException(msg));
        } else {
            m_jobContext.success(job);
        }
    } catch (Exception e) {
        ConfigManagerImpl.fail(m_jobContext, label, job, e);
    } finally {
        IOUtils.closeQuietly(log);
    }
}

From source file:org.jumpmind.symmetric.transport.internal.InternalTransportManager.java
public IIncomingTransport getFilePullTransport(Node remote, final Node local, String securityToken,
        Map<String, String> requestProperties, String registrationUrl) throws IOException {
    final PipedOutputStream respOs = new PipedOutputStream();
    final PipedInputStream respIs = new PipedInputStream(respOs);
    runAtClient(remote.getSyncUrl(), null, respOs, new IClientRunnable() {
        public void run(ISymmetricEngine engine, InputStream is, OutputStream os) throws Exception {
            IOutgoingTransport transport = new InternalOutgoingTransport(respOs, null);
            ProcessInfo processInfo = engine.getStatisticManager()
                    .newProcessInfo(new ProcessInfoKey(engine.getNodeService().findIdentityNodeId(),
                            local.getNodeId(), ProcessType.FILE_SYNC_PULL_HANDLER));
            try {
                engine.getFileSyncService().sendFiles(processInfo, local, transport);
                processInfo.setStatus(Status.OK);
            } catch (RuntimeException ex) {
                processInfo.setStatus(Status.ERROR);
                throw ex;
            }
            transport.close();
        }
    });
    return new InternalIncomingTransport(respIs);
}

From source file:ReaderInputStream.java
/**
 * Creates a new input stream from the given reader and encoding.
 *
 * @param reader   input reader
 * @param encoding character encoding used when converting the reader's characters to bytes
 */
public ReaderInputStream(Reader reader, String encoding) throws IOException {
    this.reader = reader;
    pos = new PipedOutputStream();
    pis = new PipedInputStream(pos);
    osw = new OutputStreamWriter(pos, encoding);
}

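This adapter keeps both ends of the pipe on a single thread, which is safe only while each write is drained before the pipe's buffer (1024 bytes by default) fills up. A hedged sketch of that interleaving, separate from the original class and using illustrative names:

import java.io.*;

// Illustrative single-threaded pump: encode chars into the pipe and drain the
// bytes again before the next write so the pipe buffer never fills.
static byte[] encodeViaPipe(Reader reader, String encoding) throws IOException {
    PipedOutputStream pos = new PipedOutputStream();
    PipedInputStream pis = new PipedInputStream(pos);
    Writer osw = new OutputStreamWriter(pos, encoding);
    ByteArrayOutputStream collected = new ByteArrayOutputStream();

    char[] cbuf = new char[256];          // at most ~768 UTF-8 bytes, well under the 1024-byte pipe buffer
    int n;
    while ((n = reader.read(cbuf)) != -1) {
        osw.write(cbuf, 0, n);
        osw.flush();                      // push the encoded bytes into the pipe
        byte[] chunk = new byte[pis.available()];
        int got = pis.read(chunk);        // drain on the same thread; no second thread involved
        if (got > 0) {
            collected.write(chunk, 0, got);
        }
    }
    osw.close();                          // flushes any remaining encoder output and closes the pipe
    byte[] buf = new byte[128];
    int tail;
    while ((tail = pis.read(buf)) != -1) {
        collected.write(buf, 0, tail);
    }
    pis.close();
    return collected.toByteArray();
}
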
From source file:org.openlaszlo.data.XMLGrabber.java
/**
 * Convert incoming XML to ... XML
 *
 * This method is called convertToSWF for historical reasons, and nobody
 * has changed the API call name yet.
 *
 * A dataset will look like this:
 * <resultset>
 *   <body>
 *     <weather sourceurl="http://www.srh.noaa.gov/zipcity.php?inputstring=02460">
 *       <radar src="http://www.laszlosystems.com:80/weather/small/kbox.jpg"/>
 *       <satellite src="http://www.laszlosystems.com:80/weather/thumbs/ECI8.JPG"/>
 *     </weather>
 *   </body>
 *   <headers>
 *     <header name="Date" value="Thu, 29 Dec 2005 03:49:46 GMT"/>
 *     <header name="Server" value="Apache/2.0.44 (Unix) mod_ssl/2.0.44 OpenSSL/0.9.6b DAV/2 mod_jk/1.2.1 PHP/4.3.0"/>
 *   </headers>
 * </resultset>
 */
@Override
public InputStream convertToSWF(Data data, HttpServletRequest req, HttpServletResponse res)
        throws ConversionException, IOException {
    try {
        PipedOutputStream pout = new PipedOutputStream();
        PipedInputStream in = new PipedInputStream(pout);
        XmlSerializer serializer;
        XmlPullParser parser;
        parser = getXPPFactory().newPullParser();
        InputStream dstream = data.getInputStream();
        Reader reader = FileUtils.getXMLStreamReader(dstream, "UTF-8");
        parser.setInput(reader);
        serializer = factory.newSerializer();
        serializer.setOutput(pout, "UTF-8");
        HttpMethodBase request = ((HttpData) data).getRequest();
        final String sendheaders = req.getParameter("sendheaders");
        XMLCopyThread worker = new XMLCopyThread(pout, parser, serializer, request, sendheaders);
        worker.start();
        return in;
    } catch (XmlPullParserException ex) {
        throw new ConversionException("Parsing XML: " + ex.getMessage());
    }
}

From source file:hudson.scm.subversion.CheckoutUpdater.java
@Override
public UpdateTask createTask() {
    return new UpdateTask() {
        private static final long serialVersionUID = 8349986526712487762L;

        @Override
        public List<External> perform() throws IOException, InterruptedException {
            final SVNUpdateClient svnuc = clientManager.getUpdateClient();
            final List<External> externals = new ArrayList<External>(); // store discovered externals to here

            listener.getLogger().println("Cleaning local Directory " + location.getLocalDir());
            Util.deleteContentsRecursive(new File(ws, location.getLocalDir()));

            // buffer the output by a separate thread so that the update operation
            // won't be blocked by the remoting of the data
            PipedOutputStream pos = new PipedOutputStream();
            StreamCopyThread sct = new StreamCopyThread("svn log copier", new PipedInputStream(pos),
                    listener.getLogger());
            sct.start();

            try {
                SVNRevision r = getRevision(location);
                String revisionName = r.getDate() != null ? fmt.format(r.getDate()) : r.toString();

                listener.getLogger()
                        .println("Checking out " + location.remote + " at revision " + revisionName);

                File local = new File(ws, location.getLocalDir());
                SubversionUpdateEventHandler eventHandler = new SubversionUpdateEventHandler(
                        new PrintStream(pos), externals, local, location.getLocalDir());
                svnuc.setEventHandler(eventHandler);
                svnuc.setExternalsHandler(eventHandler);
                svnuc.setIgnoreExternals(location.isIgnoreExternalsOption());

                SVNDepth svnDepth = getSvnDepth(location.getDepthOption());
                svnuc.doCheckout(location.getSVNURL(), local.getCanonicalFile(), SVNRevision.HEAD, r,
                        svnDepth, true);
            } catch (SVNCancelException e) {
                if (isAuthenticationFailedError(e)) {
                    e.printStackTrace(listener.error("Failed to check out " + location.remote));
                    return null;
                } else {
                    listener.error("Subversion checkout has been canceled");
                    throw (InterruptedException) new InterruptedException().initCause(e);
                }
            } catch (SVNException e) {
                e.printStackTrace(listener.error("Failed to check out " + location.remote));
                throw new IOException("Failed to check out " + location.remote, e);
            } finally {
                try {
                    pos.close();
                } finally {
                    try {
                        sct.join(); // wait for all data to be piped.
                    } catch (InterruptedException e) {
                        throw new IOException2("interrupted", e);
                    }
                }
            }

            return externals;
        }
    };
}

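StreamCopyThread is a Hudson utility class; the underlying pattern is simply a background thread that drains the PipedInputStream into the real destination so the writer never blocks on a full pipe. A minimal sketch with plain java.io (method and variable names are illustrative, not Hudson's):

import java.io.*;

// Sketch of the copy-thread pattern used above: a background thread drains the
// PipedInputStream into the destination so the writing thread never blocks.
static void runWithLogCopier(final OutputStream destination) throws IOException, InterruptedException {
    PipedOutputStream pos = new PipedOutputStream();
    final PipedInputStream pipeEnd = new PipedInputStream(pos);

    Thread copier = new Thread("log copier") {
        @Override
        public void run() {
            byte[] buf = new byte[8192];
            int n;
            try {
                while ((n = pipeEnd.read(buf)) != -1) {
                    destination.write(buf, 0, n);
                }
            } catch (IOException e) {
                // pipe closed or broken; nothing more to copy
            }
        }
    };
    copier.start();
    try {
        pos.write("checkout progress...\n".getBytes("UTF-8")); // stand-in for the SVN event handler output
    } finally {
        pos.close();     // signals end-of-stream to the copier
        copier.join();   // wait for all data to be piped, mirroring the finally block above
    }
}
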
From source file:com.netflix.spinnaker.clouddriver.jobs.local.JobExecutorLocal.java
private <T> JobResult<T> executeStreaming(JobRequest jobRequest, ReaderConsumer<T> consumer) throws IOException {
    PipedOutputStream stdOut = new PipedOutputStream();
    ByteArrayOutputStream stdErr = new ByteArrayOutputStream();
    Executor executor = buildExecutor(new PumpStreamHandler(stdOut, stdErr, jobRequest.getInputStream()));
    DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
    executor.execute(jobRequest.getCommandLine(), jobRequest.getEnvironment(), resultHandler);

    T result = consumer.consume(new BufferedReader(new InputStreamReader(new PipedInputStream(stdOut))));

    try {
        resultHandler.waitFor();
    } catch (InterruptedException e) {
        executor.getWatchdog().destroyProcess();
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    }

    return JobResult.<T>builder()
            .result(resultHandler.getExitValue() == 0 ? JobResult.Result.SUCCESS : JobResult.Result.FAILURE)
            .killed(executor.getWatchdog().killedProcess())
            .output(result)
            .error(stdErr.toString())
            .build();
}

From source file:com.emc.ecs.sync.CasMigrationTest.java
private String pipeAndGetMd5(byte[] source) throws Exception {
    PipedInputStream pin = new PipedInputStream(BUFFER_SIZE);
    PipedOutputStream pout = new PipedOutputStream(pin);
    Producer producer = new Producer(source, pout);

    // produce in parallel
    Thread producerThread = new Thread(producer);
    producerThread.start();

    // consume inside this thread
    byte[] dest = new byte[source.length];
    try {
        int read = 0;
        while (read < dest.length && read != -1) {
            read += pin.read(dest, read, dest.length - read);
        }
    } finally {
        try {
            pin.close();
        } catch (Throwable t) {
            // ignore
        }
    }

    // synchronize
    producerThread.join();

    return Hex.encodeHexString(MessageDigest.getInstance("MD5").digest(dest));
}

From source file:org.pircbotx.PircBotXOutputTest.java
@BeforeMethod
public void botSetup() throws Exception {
    // Setup bot
    inputLatch = new CountDownLatch(1);
    bot = new PircBotX() {
        @Override
        protected InputThread createInputThread(Socket socket, BufferedReader breader) {
            return new InputThread(bot, socket, breader) {
                @Override
                public void run() {
                    // Do nothing
                }
            };
        }
    };
    bot.setListenerManager(new GenericListenerManager());
    bot.setNick("PircBotXBot");
    bot.setName("PircBotXBot");
    bot.setMessageDelay(0L);

    // Setup streams for bot
    PipedOutputStream out = new PipedOutputStream();
    // Create an input stream that we'll kill later
    in = new ByteArrayInputStream("".getBytes());
    Socket socket = mock(Socket.class);
    when(socket.isConnected()).thenReturn(true);
    when(socket.getInputStream()).thenReturn(in);
    when(socket.getOutputStream()).thenReturn(out);
    socketFactory = mock(SocketFactory.class);
    when(socketFactory.createSocket("example.com", 6667, null, 0)).thenReturn(socket);

    // Setup ability to read from bots output
    botOut = new BufferedReader(new InputStreamReader(new PipedInputStream(out)));

    // Connect the bot to the socket
    bot.connect("example.com", 6667, null, socketFactory);

    // Make sure the bot is connected
    verify(socketFactory).createSocket("example.com", 6667, null, 0);

    // Setup useful vars
    aUser = bot.getUser("aUser");
    aChannel = bot.getChannel("#aChannel");
}