Example usage for java.io PipedOutputStream PipedOutputStream

Introduction

On this page you can find example usages of the no-argument java.io.PipedOutputStream constructor.

Prototype

public PipedOutputStream() 

Document

Creates a piped output stream that is not yet connected to a piped input stream.
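
Because the no-argument constructor leaves the stream unconnected, it must be wired to a PipedInputStream, either via connect() on one end or by passing it to the PipedInputStream(PipedOutputStream) constructor, before anything is written. A minimal, self-contained sketch (the class name is made up for illustration):

import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

public class PipeDemo {
    public static void main(String[] args) throws IOException {
        final PipedOutputStream out = new PipedOutputStream(); // not yet connected
        final PipedInputStream in = new PipedInputStream();
        out.connect(in); // equivalent: in.connect(out), or new PipedInputStream(out)

        // Piped streams are designed for two threads: one writer, one reader.
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    out.write("hello, pipe".getBytes());
                    out.close(); // closing signals end-of-stream to the reader
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }).start();

        int b;
        while ((b = in.read()) != -1) { // -1 once the writer has closed
            System.out.print((char) b);
        }
        in.close();
    }
}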

Usage

From source file:org.whitesource.agent.utils.ZipUtils.java

private static void fillExportStreamCompress(String text, OutputStream exportByteArrayOutputStream) {
    try {
        try (PipedInputStream pipedInputStream = new PipedInputStream()) {
            try (PipedOutputStream pipedOutputStream = new PipedOutputStream()) {
                pipedInputStream.connect(pipedOutputStream);

                Runnable producer = new Runnable() {
                    @Override
                    public void run() {
                        produceCompressDataFromText(text, pipedOutputStream);
                    }
                };
                Runnable consumer = new Runnable() {
                    @Override
                    public void run() {
                        consumeCompressData(pipedInputStream, exportByteArrayOutputStream);
                    }
                };

                transferData(producer, consumer);
            }
        }
    } catch (IOException e) {
        // logger.error("Failed to produce data :", e);
    }
}
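
The connection can be made from either end: pipedInputStream.connect(pipedOutputStream) as above, pipedOutputStream.connect(pipedInputStream), or by passing one end to the other's constructor; connecting an already-connected pipe throws an IOException. The producer and consumer run as separate Runnables (via transferData) because a piped pair is meant for exactly two threads: write() blocks whenever the pipe's internal buffer is full, until the reader drains it.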

From source file:org.mariotaku.twidere.loader.MicroBlogAPIStatusesLoader.java

private void saveCachedData(final List<ParcelableStatus> data) {
    final String key = getSerializationKey();
    if (key == null || data == null)
        return;
    final int databaseItemLimit = mPreferences.getInt(KEY_DATABASE_ITEM_LIMIT, DEFAULT_DATABASE_ITEM_LIMIT);
    try {
        final List<ParcelableStatus> statuses = data.subList(0, Math.min(databaseItemLimit, data.size()));
        final PipedOutputStream pos = new PipedOutputStream();
        final PipedInputStream pis = new PipedInputStream(pos);
        final Future<Object> future = pool.submit(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                LoganSquareMapperFinder.mapperFor(ParcelableStatus.class).serialize(statuses, pos);
                return null;
            }
        });
        final boolean saved = mFileCache.save(key, pis, new IoUtils.CopyListener() {
            @Override
            public boolean onBytesCopied(int current, int total) {
                return !future.isDone();
            }
        });
        if (BuildConfig.DEBUG) {
            Log.v(LOGTAG, key + " saved: " + saved);
        }
    } catch (final Exception e) {
        // Ignore
        if (BuildConfig.DEBUG && !(e instanceof IOException)) {
            Log.w(LOGTAG, e);
        }
    }
}
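
Here serialization runs on a pool thread while mFileCache.save(...) drains the other end of the pipe on the calling thread, so neither side can stall the other indefinitely; the CopyListener stops the copy once the serialization future reports done.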

From source file:jp.ikedam.jenkins.plugins.viewcopy_builder.ViewcopyBuilder.java

/**
 * Returns the configuration XML document of a view.
 *
 * @param view the view whose configuration is retrieved
 * @param logger a stream to which XML parse problems are reported
 * @return the view configuration as a DOM Document
 * @throws IOException
 * @throws SAXException
 * @throws ParserConfigurationException
 */
private Document getViewConfigXmlDocument(View view, final PrintStream logger)
        throws IOException, SAXException, ParserConfigurationException {
    XStream2 xStream2 = new XStream2(new DomDriver("UTF-8"));
    xStream2.omitField(View.class, "owner");
    xStream2.omitField(View.class, "name"); // this field causes disaster when overwriting.

    PipedOutputStream sout = new PipedOutputStream();
    PipedInputStream sin = new PipedInputStream(sout);

    xStream2.toXML(view, sout);
    sout.close();

    DocumentBuilderFactory domFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder builder = domFactory.newDocumentBuilder();
    builder.setErrorHandler(new ErrorHandler() {
        @Override
        public void warning(SAXParseException exception) throws SAXException {
            exception.printStackTrace(logger);
        }

        @Override
        public void error(SAXParseException exception) throws SAXException {
            exception.printStackTrace(logger);
        }

        @Override
        public void fatalError(SAXParseException exception) throws SAXException {
            exception.printStackTrace(logger);
        }
    });
    return builder.parse(sin);
}
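
A caveat with this pattern: xStream2.toXML(...) writes the entire document into the pipe on the calling thread before builder.parse(sin) reads a single byte. A PipedInputStream's internal buffer defaults to 1024 bytes, so any configuration larger than that would block the write forever; this works only because view configurations are small. The next example shows the safer arrangement, with the writer on its own thread.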

From source file:de.upb.wdqa.wdvd.FeatureExtractor.java

private static InputStream getUncompressedStream(final InputStream inputStream) throws IOException {
    // the decompression is a major bottleneck, make sure that it does not
    // have to wait for the buffer to empty
    final PipedOutputStream pipedOutputStream = new PipedOutputStream();
    final PipedInputStream pipedInputStream = new PipedInputStream(pipedOutputStream, BUFFER_SIZE);

    new Thread("Dump File Decompressor") {
        @Override
        public void run() {
            try {
                InputStream compressorInputStream = new BZip2CompressorInputStream(inputStream);

                IOUtils.copy(compressorInputStream, pipedOutputStream);

                compressorInputStream.close();
                pipedOutputStream.close();
            } catch (IOException e) {
                logger.error("", e);
            }
        }
    }.start();

    return pipedInputStream;
}
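
Closing pipedOutputStream inside the decompressor thread is what lets consumers of the returned stream see end-of-stream: once the writer end is closed and the buffer is drained, read() returns -1. Passing BUFFER_SIZE to the PipedInputStream constructor enlarges the pipe beyond the 1024-byte default, so the decompressor is not constantly waiting on the reader.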

From source file:ubicrypt.core.Utils.java

public static InputStream convert(final Observable<byte[]> source) {
    final PipedOutputStream pos = new PipedOutputStream();
    try {
        final PipedInputStream pis = new PipedInputStream(pos);
        source.subscribe(bytes -> {
            try {
                pos.write(bytes);
                pos.flush();
            } catch (final IOException e) {
                Throwables.propagate(e);
            }
        }, err -> {
            log.error(err.getMessage(), err);
            try {
                pis.close();
            } catch (final IOException e) {
                // ignore; the pipe is already being torn down
            }
        });
        return pis;
    } catch (final IOException e) {
        Throwables.propagate(e);
    }
    return null;
}

From source file:org.apache.pig.shock.SSHSocketImplFactory.java

@Override
protected void connect(SocketAddress address, int timeout) throws IOException {
    try {
        if (!session.isConnected()) {
            session.connect();
        }
        channel = (ChannelDirectTCPIP) session.openChannel("direct-tcpip");
        //is = channel.getInputStream();
        //os = channel.getOutputStream();
        channel.setHost(((InetSocketAddress) address).getHostName());
        channel.setPort(((InetSocketAddress) address).getPort());
        channel.setOrgPort(22);
        is = new PipedInputStream();
        os = new PipedOutputStream();
        channel.setInputStream(new PipedInputStream((PipedOutputStream) os));
        channel.setOutputStream(new PipedOutputStream((PipedInputStream) is));
        channel.connect();
        if (!channel.isConnected()) {
            log.error("Not connected");
        }
        if (channel.isEOF()) {
            log.error("EOF");
        }
    } catch (JSchException e) {
        log.error(e);
        IOException newE = new IOException(e.getMessage());
        newE.setStackTrace(e.getStackTrace());
        throw newE;
    }
}
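
Two pipe pairs are cross-connected here to emulate a socket over an SSH channel: the channel writes into a PipedOutputStream whose connected PipedInputStream is exposed as is, while data written to os reaches the channel through its connected PipedInputStream. Each direction again has exactly one writer and one reader, with JSch's channel threads on one side and the socket's caller on the other.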

From source file:org.jaqpot.core.service.client.jpdi.JPDIClientImpl.java

@Override
public Future<Dataset> predict(Dataset inputDataset, Model model, MetaInfo datasetMeta, String taskId) {

    CompletableFuture<Dataset> futureDataset = new CompletableFuture<>();

    Dataset dataset = DatasetFactory.copy(inputDataset);
    Dataset tempWithDependentFeatures = DatasetFactory.copy(dataset,
            new HashSet<>(model.getDependentFeatures()));

    dataset.getDataEntry().parallelStream().forEach(dataEntry -> {
        dataEntry.getValues().keySet().retainAll(model.getIndependentFeatures());
    });
    PredictionRequest predictionRequest = new PredictionRequest();
    predictionRequest.setDataset(dataset);
    predictionRequest.setRawModel(model.getActualModel());
    predictionRequest.setAdditionalInfo(model.getAdditionalInfo());

    final HttpPost request = new HttpPost(model.getAlgorithm().getPredictionService());
    request.addHeader("Accept", "application/json");
    request.addHeader("Content-Type", "application/json");

    PipedOutputStream out = new PipedOutputStream();
    PipedInputStream in;
    try {
        in = new PipedInputStream(out);
    } catch (IOException ex) {
        futureDataset.completeExceptionally(ex);
        return futureDataset;
    }
    request.setEntity(new InputStreamEntity(in, ContentType.APPLICATION_JSON));

    Future futureResponse = client.execute(request, new FutureCallback<HttpResponse>() {

        @Override
        public void completed(final HttpResponse response) {
            futureMap.remove(taskId);
            int status = response.getStatusLine().getStatusCode();
            try {
                InputStream responseStream = response.getEntity().getContent();

                switch (status) {
                case 200:
                case 201:
                    try {
                        PredictionResponse predictionResponse = serializer.parse(responseStream,
                                PredictionResponse.class);

                        List<LinkedHashMap<String, Object>> predictions = predictionResponse.getPredictions();
                        if (dataset.getDataEntry().isEmpty()) {
                            DatasetFactory.addEmptyRows(dataset, predictions.size());
                        }
                        List<Feature> features = featureHandler
                                .findBySource("algorithm/" + model.getAlgorithm().getId());
                        IntStream.range(0, dataset.getDataEntry().size())
                                // .parallel()
                                .forEach(i -> {
                                    Map<String, Object> row = predictions.get(i);
                                    DataEntry dataEntry = dataset.getDataEntry().get(i);
                                    if (model.getAlgorithm().getOntologicalClasses().contains("ot:Scaling")
                                            || model.getAlgorithm().getOntologicalClasses()
                                                    .contains("ot:Transformation")) {
                                        dataEntry.getValues().clear();
                                        dataset.getFeatures().clear();
                                    }
                                    row.entrySet().stream().forEach(entry -> {
                                        //                                                    Feature feature = featureHandler.findByTitleAndSource(entry.getKey(), "algorithm/" + model.getAlgorithm().getId());
                                        Feature feature = features.stream()
                                                .filter(f -> f.getMeta().getTitles().contains(entry.getKey()))
                                                .findFirst().orElse(null);
                                        if (feature == null) {
                                            return;
                                        }
                                        dataEntry.getValues().put(baseURI + "feature/" + feature.getId(),
                                                entry.getValue());
                                        FeatureInfo featInfo = new FeatureInfo(
                                                baseURI + "feature/" + feature.getId(),
                                                feature.getMeta().getTitles().stream().findFirst().get());
                                        featInfo.setCategory(Dataset.DescriptorCategory.PREDICTED);
                                        dataset.getFeatures().add(featInfo);
                                    });
                                });
                        dataset.setId(randomStringGenerator.nextString(20));
                        dataset.setTotalRows(dataset.getDataEntry().size());
                        dataset.setMeta(datasetMeta);
                        futureDataset.complete(DatasetFactory.mergeColumns(dataset, tempWithDependentFeatures));
                    } catch (Exception ex) {
                        futureDataset.completeExceptionally(ex);
                    }
                    break;
                case 400:
                    String message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new BadRequestException(message));
                    break;
                case 404:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new NotFoundException(message));
                    break;
                case 500:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new InternalServerErrorException(message));
                    break;
                default:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new InternalServerErrorException(message));
                }
            } catch (IOException | UnsupportedOperationException ex) {
                futureDataset.completeExceptionally(ex);
            }
        }

        @Override
        public void failed(final Exception ex) {
            futureMap.remove(taskId);
            futureDataset.completeExceptionally(new InternalServerErrorException(ex));
        }

        @Override
        public void cancelled() {
            futureMap.remove(taskId);
            futureDataset.cancel(true);
        }
    });
    serializer.write(predictionRequest, out);
    try {
        out.close();
    } catch (IOException ex) {
        futureDataset.completeExceptionally(ex);
    }
    futureMap.put(taskId, futureResponse);
    return futureDataset;
}
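
The ordering at the end is deliberate: client.execute(...) is asynchronous and starts consuming the InputStreamEntity on the HTTP client's I/O threads, and only then does serializer.write(...) push the request body into the pipe from the calling thread. Writing before execute() could block indefinitely once the pipe's 1024-byte buffer filled, since nothing would yet be reading the other end.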

From source file:hu.sztaki.lpds.pgportal.services.dspace.LNIclient.java

/**
 * Starts a two-stage WebDAV PUT operation, which gives the caller
 * an OutputStream on which to write the body.  The expected
 * sequence is:  call startPut(), write the body, close the stream,
 * and then call finishPut() to obtain the Handle of the
 * newly-created resource.
 * <p>
 * The actual PUT method is executed in a separate thread since it
 * has to read data from the pipe attached to the returned
 * OutputStream, and this thread must write to that OutputStream.
 * <p>
 * Since the LNI only submits Items, the target must be a
 * collection.
 *
 * @param collection Handle of the target, i.e. collection into which Item is submitted
 * @param type Package type, actually the name of package ingester plugin on the server.
 * @param options other HTTP options which are passed to package ingester plugin
 * @return an OutputStream on which the request body is written, it then MUST be closed.
 */
public OutputStream startPut(String collection, String type, NameValuePair options[])
        throws IOException, HttpException {
    PipedOutputStream out = new PipedOutputStream();
    PipedInputStream in = new PipedInputStream(out);
    startPutInternal(collection, type, options, in);
    lastPutThread = new Thread(this);
    lastPutThread.start();
    return out;
}
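
A sketch of the calling sequence the Javadoc describes; the collection Handle and package type are made up, and finishPut()'s exact signature is assumed from the Javadoc rather than taken from the class:

// Hypothetical caller of the two-stage PUT described above.
void submitItem(LNIclient lni, java.nio.file.Path packageFile) throws IOException, HttpException {
    // "123456789/42" is a made-up collection Handle; "METS" a made-up package type.
    OutputStream body = lni.startPut("123456789/42", "METS", new NameValuePair[0]);
    try {
        java.nio.file.Files.copy(packageFile, body); // write the package body
    } finally {
        body.close(); // MUST be closed so the PUT thread sees end-of-stream
    }
    String handle = lni.finishPut(); // assumed: blocks until the PUT completes, returns the new Handle
}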

From source file:jp.ikedam.jenkins.plugins.viewcopy_builder.ViewcopyBuilder.java

/**
 * Returns an InputStream for an XML document.
 *
 * @param doc the document to serialize
 * @return a stream from which the serialized document can be read
 * @throws TransformerException
 * @throws IOException
 */
private InputStream getInputStreamFromDocument(Document doc) throws TransformerException, IOException {
    TransformerFactory tfactory = TransformerFactory.newInstance();
    Transformer transformer = tfactory.newTransformer();
    PipedOutputStream sout = new PipedOutputStream();
    PipedInputStream sin = new PipedInputStream(sout);
    transformer.transform(new DOMSource(doc), new StreamResult(sout));
    sout.close();

    return sin;
}
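
Note the same single-thread caveat as in getViewConfigXmlDocument above: the transformer fills the pipe before anything reads it, so a document larger than the default 1024-byte pipe buffer would block transform() forever. A hypothetical thread-based rework of the same method (not from the plugin) avoids this:

private InputStream getInputStreamFromDocumentThreaded(final Document doc) throws IOException {
    final PipedOutputStream sout = new PipedOutputStream();
    PipedInputStream sin = new PipedInputStream(sout);
    new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                Transformer transformer = TransformerFactory.newInstance().newTransformer();
                transformer.transform(new DOMSource(doc), new StreamResult(sout));
            } catch (TransformerException e) {
                e.printStackTrace(); // hypothetical handling; adapt to the caller's needs
            } finally {
                try {
                    sout.close(); // end-of-stream for the reader of sin
                } catch (IOException e) {
                    // ignore
                }
            }
        }
    }).start();
    return sin;
}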

From source file:com.dtolabs.rundeck.core.execution.impl.jsch.JschNodeExecutor.java

public NodeExecutorResult executeCommand(final ExecutionContext context, final String[] command,
        final INodeEntry node) {
    if (null == node.getHostname() || null == node.extractHostname()) {
        return NodeExecutorResultImpl.createFailure(StepFailureReason.ConfigurationFailure,
                "Hostname must be set to connect to remote node '" + node.getNodename() + "'", node);
    }

    final ExecutionListener listener = context.getExecutionListener();
    final Project project = new Project();
    AntSupport.addAntBuildListener(listener, project);

    boolean success = false;
    final ExtSSHExec sshexec;
    //perform jsch ssh command
    final NodeSSHConnectionInfo nodeAuthentication = new NodeSSHConnectionInfo(node, framework, context);
    final int timeout = nodeAuthentication.getSSHTimeout();
    try {

        sshexec = SSHTaskBuilder.build(node, command, project, context.getDataContext(), nodeAuthentication,
                context.getLoglevel(), listener);
    } catch (SSHTaskBuilder.BuilderException e) {
        return NodeExecutorResultImpl.createFailure(StepFailureReason.ConfigurationFailure, e.getMessage(),
                node);
    }

    //Sudo support

    final ExecutorService executor = Executors.newSingleThreadExecutor(new ThreadFactory() {
        public Thread newThread(Runnable r) {
            return new Thread(null, r,
                    "SudoResponder " + node.getNodename() + ": " + System.currentTimeMillis());
        }
    });

    final Future<ResponderTask.ResponderResult> responderFuture;
    final SudoResponder sudoResponder = SudoResponder.create(node, framework, context);
    Runnable responderCleanup = null;
    if (sudoResponder.isSudoEnabled() && sudoResponder.matchesCommandPattern(command[0])) {
        final DisconnectResultHandler resultHandler = new DisconnectResultHandler();

        //configure two piped i/o stream pairs, to connect to the input/output of the SSH connection
        final PipedInputStream responderInput = new PipedInputStream();
        final PipedOutputStream responderOutput = new PipedOutputStream();
        final PipedInputStream jschInput = new PipedInputStream();
        //lead pipe allows connected inputstream to close and not hang the writer to this stream
        final PipedOutputStream jschOutput = new LeadPipeOutputStream();
        try {
            responderInput.connect(jschOutput);
            jschInput.connect(responderOutput);
        } catch (IOException e) {
            return NodeExecutorResultImpl.createFailure(StepFailureReason.IOFailure, e.getMessage(), node);
        }

        //first sudo prompt responder
        ResponderTask responder = new ResponderTask(sudoResponder, responderInput, responderOutput,
                resultHandler);

        /**
         * Callable will be executed by the ExecutorService
         */
        final Callable<ResponderTask.ResponderResult> responderResultCallable;

        //if 2nd responder
        final SudoResponder sudoResponder2 = SudoResponder.create(node, framework, context, SUDO2_OPT_PREFIX,
                DEFAULT_SUDO2_PASSWORD_OPTION, DEFAULT_SUDO2_COMMAND_PATTERN);
        if (sudoResponder2.isSudoEnabled()
                && sudoResponder2.matchesCommandPattern(CLIUtils.generateArgline(null, command, false))) {
            logger.debug("Enable second sudo responder");

            sudoResponder2.setDescription("Second " + SudoResponder.DEFAULT_DESCRIPTION);
            sudoResponder.setDescription("First " + SudoResponder.DEFAULT_DESCRIPTION);

            //sequence of the first then the second sudo responder
            responderResultCallable = responder.createSequence(sudoResponder2);
        } else {
            responderResultCallable = responder;
        }

        //set up SSH execution
        sshexec.setAllocatePty(true);
        sshexec.setInputStream(jschInput);
        sshexec.setSecondaryStream(jschOutput);
        sshexec.setDisconnectHolder(resultHandler);

        responderFuture = executor.submit(responderResultCallable);
        //close streams after responder is finished
        responderCleanup = new Runnable() {
            public void run() {
                logger.debug("SudoResponder shutting down...");
                try {
                    responderInput.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                try {
                    responderOutput.flush();
                    responderOutput.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                //executor pool shutdown
                executor.shutdownNow();
            }
        };
        executor.submit(responderCleanup);
    } else {
        responderFuture = null;
    }
    if (null != context.getExecutionListener()) {
        context.getExecutionListener().log(3, "Starting SSH Connection: " + nodeAuthentication.getUsername()
                + "@" + node.getHostname() + " (" + node.getNodename() + ")");
    }
    String errormsg = null;
    FailureReason failureReason = null;
    try {
        sshexec.execute();
        success = true;
    } catch (BuildException e) {
        final ExtractFailure extractJschFailure = extractFailure(e, node, timeout, framework);
        errormsg = extractJschFailure.getErrormsg();
        failureReason = extractJschFailure.getReason();
        context.getExecutionListener().log(0, errormsg);
    }
    if (null != responderCleanup) {
        responderCleanup.run();
    }
    shutdownAndAwaitTermination(executor);
    if (null != responderFuture) {
        try {
            logger.debug("Waiting 5 seconds for responder future result");
            final ResponderTask.ResponderResult result = responderFuture.get(5, TimeUnit.SECONDS);
            logger.debug("Responder result: " + result);
            if (!result.isSuccess() && !result.isInterrupted()) {
                context.getExecutionListener().log(0,
                        result.getResponder().toString() + " failed: " + result.getFailureReason());
            }
        } catch (InterruptedException e) {
            //ignore
        } catch (java.util.concurrent.ExecutionException e) {
            e.printStackTrace();
        } catch (TimeoutException e) {
            //ignore
        }
    }
    final int resultCode = sshexec.getExitStatus();

    if (success) {
        return NodeExecutorResultImpl.createSuccess(node);
    } else {
        return NodeExecutorResultImpl.createFailure(failureReason, errormsg, node, resultCode);
    }
}