List of usage examples for the java.io.PipedOutputStream no-argument constructor
public PipedOutputStream()
From source file:org.lamport.tla.toolbox.jcloud.PayloadHelper.java
public static Payload appendModel2Jar(final Path modelPath, String mainClass, Properties properties, IProgressMonitor monitor) throws IOException { /*/*from w w w .j a va2 s. c om*/ * Get the standard tla2tools.jar from the classpath as a blueprint. * It's located in the org.lamport.tla.toolbox.jclouds bundle in the * files/ directory. It uses OSGi functionality to read files/tla2tools.jar * from the .jclouds bundle. * The copy of the blueprint will contain the spec & model and * additional metadata (properties, amended manifest). */ final Bundle bundle = FrameworkUtil.getBundle(PayloadHelper.class); final URL toolsURL = bundle.getEntry("files/tla2tools.jar"); if (toolsURL == null) { throw new RuntimeException("No tlatools.jar and/or spec to deploy"); } /* * Copy the tla2tools.jar blueprint to a temporary location on * disk to append model files below. */ final File tempFile = File.createTempFile("tla2tools", ".jar"); tempFile.deleteOnExit(); try (FileOutputStream out = new FileOutputStream(tempFile)) { IOUtils.copy(toolsURL.openStream(), out); } /* * Create a virtual filesystem in jar format. */ final Map<String, String> env = new HashMap<>(); env.put("create", "true"); final URI uri = URI.create("jar:" + tempFile.toURI()); try (FileSystem fs = FileSystems.newFileSystem(uri, env)) { /* * Copy the spec and model into the jar's model/ folder. * Also copy any module override (.class file) into the jar. */ try (DirectoryStream<Path> modelDirectoryStream = Files.newDirectoryStream(modelPath, "*.{cfg,tla,class}")) { for (final Path file : modelDirectoryStream) { final Path to = fs.getPath("/model/" + file.getFileName()); Files.copy(file, to, StandardCopyOption.REPLACE_EXISTING); } } /* * Add given class as Main-Class statement to jar's manifest. This * causes Java to launch this class when no other Main class is * given on the command line. Thus, it shortens the command line * for us. 
*/ final Path manifestPath = fs.getPath("/META-INF/", "MANIFEST.MF"); final Manifest manifest = new Manifest(Files.newInputStream(manifestPath)); manifest.getMainAttributes().put(Attributes.Name.MAIN_CLASS, mainClass); final PipedOutputStream ps = new PipedOutputStream(); final PipedInputStream is = new PipedInputStream(ps); manifest.write(ps); ps.close(); Files.copy(is, manifestPath, StandardCopyOption.REPLACE_EXISTING); /* * Add properties file to archive. The property file contains the * result email address... from where TLC eventually reads it. */ // On Windows 7 and above the file has to be created in the system's // temp folder. Otherwise except file creation to fail with a // AccessDeniedException final File f = File.createTempFile("generated", "properties"); OutputStream out = new FileOutputStream(f); // Append all entries in "properties" to the temp file f properties.store(out, "This is an optional header comment string"); // Copy the temp file f into the jar with path /model/generated.properties. final Path to = fs.getPath("/model/generated.properties"); Files.copy(f.toPath(), to, StandardCopyOption.REPLACE_EXISTING); } catch (final IOException e1) { throw new RuntimeException("No model directory found to deploy", e1); } /* * Compress archive with pack200 to achieve a much higher compression rate. We * are going to send the file on the wire after all: * * effort: take more time choosing codings for better compression segment: use * largest-possible archive segments (>10% better compression) mod time: smear * modification times to a single value deflate: ignore all JAR deflation hints * in original archive */ final Packer packer = Pack200.newPacker(); final Map<String, String> p = packer.properties(); p.put(Packer.EFFORT, "9"); p.put(Packer.SEGMENT_LIMIT, "-1"); p.put(Packer.MODIFICATION_TIME, Packer.LATEST); p.put(Packer.DEFLATE_HINT, Packer.FALSE); // Do not reorder which changes package names. Pkg name changes e.g. break // SimpleFilenameToStream. 
p.put(Packer.KEEP_FILE_ORDER, Packer.TRUE); // Throw an error if any of the above attributes is unrecognized. p.put(Packer.UNKNOWN_ATTRIBUTE, Packer.ERROR); final File packTempFile = File.createTempFile("tla2tools", ".pack.gz"); try (final JarFile jarFile = new JarFile(tempFile); final GZIPOutputStream fos = new GZIPOutputStream(new FileOutputStream(packTempFile));) { packer.pack(jarFile, fos); } catch (IOException ioe) { throw new RuntimeException("Failed to pack200 the tla2tools.jar file", ioe); } /* * Convert the customized tla2tools.jar into a jClouds payload object. This is * the format it will be transfered on the wire. This is handled by jClouds * though. */ Payload jarPayLoad = null; try { final InputStream openStream = new FileInputStream(packTempFile); jarPayLoad = Payloads.newInputStreamPayload(openStream); // manually set length of content to prevent a NPE bug jarPayLoad.getContentMetadata().setContentLength(Long.valueOf(openStream.available())); } catch (final IOException e1) { throw new RuntimeException("No tlatools.jar to deploy", e1); } finally { monitor.worked(5); } return jarPayLoad; }
From source file:org.jumpmind.symmetric.transport.internal.InternalTransportManager.java
/**
 * Builds an in-memory "pull" transport for the internal (loopback) transport
 * manager: data extracted for {@code local} is written into a piped stream
 * pair by a client runnable, and the caller reads it back through the
 * returned incoming transport — no network I/O is involved.
 *
 * NOTE(review): securityToken, requestProperties and registrationUrl are
 * accepted but unused here — presumably required only by the interface;
 * confirm against ITransportManager.
 */
public IIncomingTransport getPullTransport(Node remote, final Node local, String securityToken,
        Map<String, String> requestProperties, String registrationUrl) throws IOException {
    // Pipe pair: the runnable below writes extracted data into respOs,
    // the returned transport reads it from respIs.
    final PipedOutputStream respOs = new PipedOutputStream();
    final PipedInputStream respIs = new PipedInputStream(respOs);
    final ChannelMap suspendIgnoreChannels = symmetricEngine.getConfigurationService()
            .getSuspendIgnoreChannelLists(remote.getNodeId());
    runAtClient(remote.getSyncUrl(), null, respOs, new IClientRunnable() {
        public void run(ISymmetricEngine engine, InputStream is, OutputStream os) throws Exception {
            IOutgoingTransport transport = new InternalOutgoingTransport(respOs, suspendIgnoreChannels,
                    IoConstants.ENCODING);
            ProcessInfo processInfo = engine.getStatisticManager().newProcessInfo(new ProcessInfoKey(
                    engine.getNodeService().findIdentityNodeId(), local.getNodeId(), ProcessType.PULL_HANDLER));
            try {
                engine.getDataExtractorService().extract(processInfo, local, transport);
                processInfo.setStatus(Status.OK);
            } catch (RuntimeException ex) {
                processInfo.setStatus(Status.ERROR);
                throw ex;
            }
            // Closing ends the pipe so the reader of respIs sees EOF.
            // NOTE(review): close() is skipped when extract() throws —
            // presumably runAtClient cleans up the stream; confirm.
            transport.close();
        }
    });
    return new InternalIncomingTransport(respIs);
}
From source file:ro.kuberam.libs.java.crypto.CryptoModuleTests.java
public InputStream openStream() throws IOException { final PipedOutputStream out = new PipedOutputStream(); final PipedInputStream in = new PipedInputStream(out); final Runnable exporter = () -> { try {// w ww . ja va 2s . c om out.write("message".getBytes(StandardCharsets.UTF_8)); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } IOUtils.closeQuietly(out); }; // executor.submit(exporter); return in; }
From source file:at.sti2.sparkwave.ServerSocketThread.java
/** * TCP/IP Sparkwave Network Server//from w ww .j a v a 2 s. c om */ public void run() { try { //Open TCP/IP Server socket ServerSocket server = new ServerSocket(configuration.getPort()); logger.info("Server: " + server); while (!Thread.interrupted()) { logger.info("Waiting for connection..."); Socket sock = server.accept(); logger.info("Connected: " + sock); //TODO Not every connection should cause a rebuild of the plugin chain. Should work with arbitrary many connections and failure resistent. re-use plugin threads and parser threads. InputStream socketStreamIn = sock.getInputStream(); // PreProcessing Plugins to be loaded if (configuration.getPPPluginsConfig().size() == 2) { //TODO support arbitrary many plugins // Wiring: socketStreamIn --> (Plugin1) --> PipeOut1 --> PipeIn1 final PipedOutputStream pipeOut1 = new PipedOutputStream(); final PipedInputStream pipeIn1 = new PipedInputStream(pipeOut1); // Wiring: PipeIn1 --> (Plugin2) --> PipeOut2 --> PipeIn2 final PipedOutputStream pipeOut2 = new PipedOutputStream(); final PipedInputStream pipeIn2 = new PipedInputStream(pipeOut2); final ByteArrayOutputStream baos = new ByteArrayOutputStream(); // plugin configuration PPPluginConfig pluginConfig1 = configuration.getPPPluginsConfig().get(0); PreProcess plugin1 = instantiateAndConfigurePlugin(pluginConfig1, socketStreamIn, pipeOut1); PPPluginConfig pluginConfig2 = configuration.getPPPluginsConfig().get(1); PreProcess plugin2 = instantiateAndConfigurePlugin(pluginConfig2, pipeIn1, pipeOut2); // N3 Parser StreamParserThread sparkStreamParserThread = new StreamParserThread(pipeIn2, queues); // kick-off pre-process sparkwaveParserExecutor.execute(plugin1); sparkwaveParserExecutor.execute(plugin2); // kick-off parser sparkwaveParserExecutor.execute(sparkStreamParserThread); } else { StreamParserThread sparkStreamParserThread = new StreamParserThread(socketStreamIn, queues); // kick-off parser sparkwaveParserExecutor.execute(sparkStreamParserThread); } } } catch 
(Exception e) { logger.error(e.getMessage()); } finally { } }
From source file:io.fabric8.docker.client.impl.PushImage.java
@Override
public TagForceToRegistryInterface<OutputHandle> redirectingOutput() {
    // Hand the builder a fresh pipe endpoint so callers can consume the
    // push output as a stream; all other state is carried over unchanged.
    final PipedOutputStream redirectedOut = new PipedOutputStream();
    return new PushImage(client, config, name, tag, force, redirectedOut, listener);
}
From source file:com.github.chenxiaolong.dualbootpatcher.switcher.ZipFlashingOutputFragment.java
@Override
public void onStart() {
    super.onStart();
    // Create the terminal session that will render the flashing output.
    mSession = new TermSession();
    // We don't care about any input because this is kind of a "dumb" terminal
    // output, not a proper interactive one.
    mSession.setTermOut(new NullOutputStream());
    // Pipe pair: bytes written to mOS (by the service callbacks) appear as
    // terminal input and get displayed.
    mOS = new PipedOutputStream();
    try {
        mSession.setTermIn(new PipedInputStream(mOS));
    } catch (IOException e) {
        // NOTE(review): connecting a freshly created pipe pair should not
        // fail; if it does, the terminal simply shows no output.
        e.printStackTrace();
    }
    mEmulatorView.attachSession(mSession);
    // Start and bind to the service that performs the actual flashing.
    Intent intent = new Intent(getActivity(), SwitcherService.class);
    getActivity().bindService(intent, this, Context.BIND_AUTO_CREATE);
    getActivity().startService(intent);
}
From source file:de.tu_dresden.psy.fca.ConexpCljBridge.java
/**
 * Starts a conexp-clj (Clojure) subprocess via Apache Commons Exec and wires
 * three pipe pairs to its stdin, stdout and stderr so the bridge can talk to
 * it asynchronously. Errors during pipe setup or launch are only printed,
 * leaving the corresponding stream fields null — callers should be prepared
 * for that.
 */
public ConexpCljBridge() {
    // Single-byte scratch buffer used elsewhere in the bridge.
    this.b = new byte[1];
    /*
     * Build the command line (see conexp-clj/bin/conexp-clj).
     */
    String java_bin = Launcher.getJavaCommand();
    CommandLine conexp_cmd = new CommandLine(java_bin);
    conexp_cmd.addArgument("-server");
    conexp_cmd.addArgument("-cp");
    conexp_cmd.addArgument("./conexp-clj/lib/conexp-clj-0.0.7-alpha-SNAPSHOT-standalone.jar");
    conexp_cmd.addArgument("clojure.main");
    conexp_cmd.addArgument("-e");
    // NOTE(review): the empty "-e" expression appears deliberate (evaluate
    // nothing before loading the script file below) — confirm against the
    // conexp-clj launcher.
    conexp_cmd.addArgument("");
    conexp_cmd.addArgument("./conexp-clj/lib/conexp-clj.clj");
    /*
     * Open the pipes. Naming: to_conexp is written by us and drained into
     * the subprocess's stdin; from_conexp/error_conexp are read by us and
     * fed from the subprocess's stdout/stderr. Buffer size 2048 bytes.
     */
    this.to_conexp = new PipedOutputStream();
    try {
        this.stream_to_conexp = new PipedInputStream(this.to_conexp, 2048);
    } catch (IOException e2) {
        e2.printStackTrace();
    }
    this.stream_error_conexp = new PipedOutputStream();
    this.stream_from_conexp = new PipedOutputStream();
    try {
        this.from_conexp = new PipedInputStream(this.stream_from_conexp, 2048);
    } catch (IOException e1) {
        e1.printStackTrace();
    }
    try {
        this.error_conexp = new PipedInputStream(this.stream_error_conexp, 2048);
    } catch (IOException e1) {
        e1.printStackTrace();
    }
    /*
     * Set up Apache Commons Exec: the PumpStreamHandler pumps subprocess
     * stdout/stderr into our pipes and our pipe into its stdin.
     */
    this.result = new DefaultExecuteResultHandler();
    DefaultExecutor executor = new DefaultExecutor();
    executor.setExitValue(0);
    executor.setStreamHandler(
            new PumpStreamHandler(this.stream_from_conexp, this.stream_error_conexp, this.stream_to_conexp));
    /*
     * Run in non-blocking mode: execute() returns immediately and completion
     * is reported through the result handler.
     */
    try {
        executor.execute(conexp_cmd, this.result);
    } catch (ExecuteException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    this.output_buffer = "";
}
From source file:org.apache.zeppelin.spark.ZeppelinR.java
/**
 * Starts the R REPL as an external process and connects it to the
 * interpreter: the generated R script is launched via Commons Exec, its
 * stdout/stderr are captured by the interpreter output stream, and a pipe
 * ({@code input}) feeds statements from {@link #eval} into the process's
 * stdin.
 *
 * @throws IOException if the script cannot be created or the process fails to start
 */
public void open() throws IOException {
    createRScript();
    // Register this instance so the R side can call back via its hashCode key.
    zeppelinR.put(hashCode(), this);
    CommandLine cmd = CommandLine.parse(rCmdPath);
    cmd.addArgument("--no-save");
    cmd.addArgument("--no-restore");
    cmd.addArgument("-f");
    cmd.addArgument(scriptPath);
    cmd.addArgument("--args");
    cmd.addArgument(Integer.toString(hashCode()));
    cmd.addArgument(Integer.toString(port));
    cmd.addArgument(libPath);
    cmd.addArgument(Integer.toString(sparkVersion.toNumber()));
    // Dump out the R command to facilitate manually running it, e.g. for
    // fault diagnosis purposes.
    logger.debug(cmd.toString());
    executor = new DefaultExecutor();
    outputStream = new InterpreterOutputStream(logger);
    // Pipe pair: eval() writes R statements into "input"; the process reads
    // them from "in" through the PumpStreamHandler below.
    input = new PipedOutputStream();
    PipedInputStream in = new PipedInputStream(input);
    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream, outputStream, in);
    // INFINITE_TIMEOUT: the REPL is long-lived; never kill it on a timer.
    executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));
    executor.setStreamHandler(streamHandler);
    Map env = EnvironmentUtils.getProcEnvironment();
    initialOutput = new InterpreterOutput(null);
    outputStream.setInterpreterOutput(initialOutput);
    // "this" serves as the ExecuteResultHandler for completion callbacks.
    executor.execute(cmd, env, this);
    rScriptRunning = true;
    // Flush output by evaluating a no-op.
    eval("cat('')");
}
From source file:org.pentaho.s3.vfs.S3FileObject.java
/**
 * Returns an output stream whose contents are uploaded to S3 when the stream
 * is closed: caller writes are drained by a background thread into an
 * in-memory buffer, and the overridden close() joins that thread and puts
 * the buffered bytes as the S3 object.
 *
 * NOTE(review): the append flag is ignored — writes always replace the
 * object's content; confirm that is intended. Upload failures are only
 * printed, so close() never reports them to the caller.
 */
protected OutputStream doGetOutputStream(final boolean append) throws Exception {
    // Buffer that accumulates everything the caller writes.
    final ByteArrayOutputStream output = new ByteArrayOutputStream();
    final PipedInputStream pis = new PipedInputStream();
    // Background drain: copies from the pipe into the buffer until the
    // write end is closed.
    final Thread t = new Thread(new Runnable() {
        public void run() {
            try {
                IOUtils.copy(pis, output);
            } catch (IOException e) {
                // NOTE(review): a failed copy leaves "output" truncated and is
                // only printed — close() below would then upload partial data.
                e.printStackTrace();
            }
        }
    });
    t.start();
    final PipedOutputStream pos = new PipedOutputStream() {
        public void close() throws IOException {
            super.close();
            try {
                // Wait for the reader thread to finish draining the pipe,
                // then upload the complete buffer to S3.
                t.join();
                S3Object s3Object = getS3Object(true);
                byte[] bytes = output.toByteArray();
                s3Object.setContentLength(bytes.length);
                s3Object.setDataInputStream(new ByteArrayInputStream(bytes));
                fileSystem.getS3Service().putObject(getS3Bucket(), s3Object);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    };
    // Connect after construction so the drain thread (already running)
    // starts receiving bytes as soon as the caller writes.
    pis.connect(pos);
    return pos;
}
From source file:SerialIntList.java
/** * Use object serialization to make a "deep clone" of the object o. This * method serializes o and all objects it refers to, and then deserializes * that graph of objects, which means that everything is copied. This differs * from the clone() method of an object which is usually implemented to * produce a "shallow" clone that copies references to other objects, instead * of copying all referenced objects.//from w w w . j ava 2 s .c o m */ static Object deepclone(final Serializable o) throws IOException, ClassNotFoundException { // Create a connected pair of "piped" streams. // We'll write bytes to one, and them from the other one. final PipedOutputStream pipeout = new PipedOutputStream(); PipedInputStream pipein = new PipedInputStream(pipeout); // Now define an independent thread to serialize the object and write // its bytes to the PipedOutputStream Thread writer = new Thread() { public void run() { ObjectOutputStream out = null; try { out = new ObjectOutputStream(pipeout); out.writeObject(o); } catch (IOException e) { } finally { try { out.close(); } catch (Exception e) { } } } }; writer.start(); // Make the thread start serializing and writing // Meanwhile, in this thread, read and deserialize from the piped // input stream. The resulting object is a deep clone of the original. ObjectInputStream in = new ObjectInputStream(pipein); return in.readObject(); }