Example usage for java.io PipedInputStream PipedInputStream

Introduction

On this page you can find example usage of the java.io PipedInputStream constructor.

Prototype

public PipedInputStream(int pipeSize) 

Document

Creates a PipedInputStream so that it is not yet connected (see connect(java.io.PipedOutputStream)) and uses the specified pipe size for the pipe's buffer.
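
Before the usage listings, here is a minimal sketch of this overload (the class name, buffer size, and payload are arbitrary choices for illustration): the stream starts out unconnected, with a custom buffer in place of the 1024-byte default, and must be connected to a PipedOutputStream before use.

import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

public class PipeSizeDemo {
    public static void main(String[] args) throws IOException {
        // Created unconnected, with a 64 KiB buffer instead of the 1024-byte default.
        PipedInputStream in = new PipedInputStream(64 * 1024);

        // Connect before use; reading from an unconnected pipe throws IOException.
        PipedOutputStream out = new PipedOutputStream();
        in.connect(out);

        out.write("hello".getBytes());
        out.close();

        int b;
        while ((b = in.read()) != -1) {
            System.out.print((char) b);
        }
        in.close();
    }
}

The same-thread write here is safe only because five bytes fit comfortably in the buffer; most of the examples below put reader and writer on separate threads for exactly that reason.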

Usage

From source file:org.lockss.util.TestStreamUtil.java

public void testReadBufShortRead() throws Exception {
    byte[] snd1 = { '0', '1', 0, '3' };
    final int len = 12;
    final byte[] buf = new byte[len];
    PipedOutputStream outs = new PipedOutputStream();
    final InputStream ins = new PipedInputStream(outs);
    final Exception[] ex = { null };
    final int[] res = { 0 };
    Thread th = new Thread() {
        public void run() {
            try {
                res[0] = StreamUtil.readBytes(ins, buf, len);
                StreamUtil.readBytes(ins, buf, len);
            } catch (IOException e) {
                ex[0] = e;
            }
        }
    };
    th.start();
    outs.write(snd1);
    outs.close();
    th.join();

    assertEquals(snd1.length, res[0]);
    assertEquals(null, ex[0]);
}

From source file:org.lockss.util.TestStreamUtil.java

public void testReadBufMultipleRead() throws Exception {
    byte[] snd1 = { '0', '1', 0, '3' };
    byte[] snd2 = { '4', '5', '6', '7', '8', '9', 'a', 'b' };
    byte[] exp = { '0', '1', 0, '3', '4', '5', '6', '7', '8', '9', 'a', 'b' };
    final int len = exp.length;
    final byte[] buf = new byte[len];
    PipedOutputStream outs = new PipedOutputStream();
    final InputStream ins = new PipedInputStream(outs);
    final Exception[] ex = { null };
    final int[] res = { 0 };
    Thread th = new Thread() {
        public void run() {
            try {
                res[0] = StreamUtil.readBytes(ins, buf, len);
            } catch (IOException e) {
                ex[0] = e;
            }
        }
    };
    th.start();
    outs.write(snd1);
    TimerUtil.guaranteedSleep(100);
    outs.write(snd2);
    outs.flush();
    th.join();

    assertEquals(exp, buf);
    assertEquals(len, res[0]);
    assertNull(ex[0]);
    outs.close();
}

From source file:org.mariotaku.twidere.loader.MicroBlogAPIStatusesLoader.java

private void saveCachedData(final List<ParcelableStatus> data) {
    final String key = getSerializationKey();
    if (key == null || data == null)
        return;
    final int databaseItemLimit = mPreferences.getInt(KEY_DATABASE_ITEM_LIMIT, DEFAULT_DATABASE_ITEM_LIMIT);
    try {
        final List<ParcelableStatus> statuses = data.subList(0, Math.min(databaseItemLimit, data.size()));
        final PipedOutputStream pos = new PipedOutputStream();
        final PipedInputStream pis = new PipedInputStream(pos);
        final Future<Object> future = pool.submit(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                LoganSquareMapperFinder.mapperFor(ParcelableStatus.class).serialize(statuses, pos);
                return null;
            }
        });
        final boolean saved = mFileCache.save(key, pis, new IoUtils.CopyListener() {
            @Override
            public boolean onBytesCopied(int current, int total) {
                return !future.isDone();
            }
        });
        if (BuildConfig.DEBUG) {
            Log.v(LOGTAG, key + " saved: " + saved);
        }
    } catch (final Exception e) {
        // Ignore
        if (BuildConfig.DEBUG && !(e instanceof IOException)) {
            Log.w(LOGTAG, e);
        }
    }
}

From source file:jp.ikedam.jenkins.plugins.viewcopy_builder.ViewcopyBuilder.java

/**
 * Returns the configuration XML document of a view.
 *
 * @param view the view whose configuration is serialized
 * @param logger stream to which XML parsing problems are reported
 * @return the view configuration as an XML Document
 * @throws IOException
 * @throws SAXException
 * @throws ParserConfigurationException
 */
private Document getViewConfigXmlDocument(View view, final PrintStream logger)
        throws IOException, SAXException, ParserConfigurationException {
    XStream2 xStream2 = new XStream2(new DomDriver("UTF-8"));
    xStream2.omitField(View.class, "owner");
    xStream2.omitField(View.class, "name"); // this field causes disaster when overwriting.

    PipedOutputStream sout = new PipedOutputStream();
    PipedInputStream sin = new PipedInputStream(sout);

    // Note: the pipe is written and read on the same thread; this only works
    // while the serialized view fits in the pipe's default 1024-byte buffer,
    // otherwise this write would block forever.
    xStream2.toXML(view, sout);
    sout.close();

    DocumentBuilderFactory domFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder builder = domFactory.newDocumentBuilder();
    builder.setErrorHandler(new ErrorHandler() {
        @Override
        public void warning(SAXParseException exception) throws SAXException {
            exception.printStackTrace(logger);
        }

        @Override
        public void error(SAXParseException exception) throws SAXException {
            exception.printStackTrace(logger);
        }

        @Override
        public void fatalError(SAXParseException exception) throws SAXException {
            exception.printStackTrace(logger);
        }
    });
    return builder.parse(sin);
}

From source file:ubicrypt.core.Utils.java

public static InputStream convert(final Observable<byte[]> source) {
    final PipedOutputStream pos = new PipedOutputStream();
    try {
        final PipedInputStream pis = new PipedInputStream(pos);
        source.subscribe(bytes -> {
            try {
                pos.write(bytes);
                pos.flush();
            } catch (final IOException e) {
                Throwables.propagate(e);
            }
        }, err -> {
            log.error(err.getMessage(), err);
            try {
                pis.close();
            } catch (final IOException e) {
                // ignore failures while closing the pipe on the error path
            }
        });
        return pis;
    } catch (final IOException e) {
        Throwables.propagate(e);
    }
    return null;
}

From source file:org.jaqpot.core.service.client.jpdi.JPDIClientImpl.java

@Override
public Future<Dataset> predict(Dataset inputDataset, Model model, MetaInfo datasetMeta, String taskId) {

    CompletableFuture<Dataset> futureDataset = new CompletableFuture<>();

    Dataset dataset = DatasetFactory.copy(inputDataset);
    Dataset tempWithDependentFeatures = DatasetFactory.copy(dataset,
            new HashSet<>(model.getDependentFeatures()));

    dataset.getDataEntry().parallelStream().forEach(dataEntry -> {
        dataEntry.getValues().keySet().retainAll(model.getIndependentFeatures());
    });
    PredictionRequest predictionRequest = new PredictionRequest();
    predictionRequest.setDataset(dataset);
    predictionRequest.setRawModel(model.getActualModel());
    predictionRequest.setAdditionalInfo(model.getAdditionalInfo());

    final HttpPost request = new HttpPost(model.getAlgorithm().getPredictionService());
    request.addHeader("Accept", "application/json");
    request.addHeader("Content-Type", "application/json");

    PipedOutputStream out = new PipedOutputStream();
    PipedInputStream in;
    try {
        in = new PipedInputStream(out);
    } catch (IOException ex) {
        futureDataset.completeExceptionally(ex);
        return futureDataset;
    }
    request.setEntity(new InputStreamEntity(in, ContentType.APPLICATION_JSON));

    Future futureResponse = client.execute(request, new FutureCallback<HttpResponse>() {

        @Override
        public void completed(final HttpResponse response) {
            futureMap.remove(taskId);
            int status = response.getStatusLine().getStatusCode();
            try {
                InputStream responseStream = response.getEntity().getContent();

                switch (status) {
                case 200:
                case 201:
                    try {
                        PredictionResponse predictionResponse = serializer.parse(responseStream,
                                PredictionResponse.class);

                        List<LinkedHashMap<String, Object>> predictions = predictionResponse.getPredictions();
                        if (dataset.getDataEntry().isEmpty()) {
                            DatasetFactory.addEmptyRows(dataset, predictions.size());
                        }
                        List<Feature> features = featureHandler
                                .findBySource("algorithm/" + model.getAlgorithm().getId());
                        IntStream.range(0, dataset.getDataEntry().size())
                                // .parallel()
                                .forEach(i -> {
                                    Map<String, Object> row = predictions.get(i);
                                    DataEntry dataEntry = dataset.getDataEntry().get(i);
                                    if (model.getAlgorithm().getOntologicalClasses().contains("ot:Scaling")
                                            || model.getAlgorithm().getOntologicalClasses()
                                                    .contains("ot:Transformation")) {
                                        dataEntry.getValues().clear();
                                        dataset.getFeatures().clear();
                                    }
                                    row.entrySet().stream().forEach(entry -> {
                                        //                                                    Feature feature = featureHandler.findByTitleAndSource(entry.getKey(), "algorithm/" + model.getAlgorithm().getId());
                                        Feature feature = features.stream()
                                                .filter(f -> f.getMeta().getTitles().contains(entry.getKey()))
                                                .findFirst().orElse(null);
                                        if (feature == null) {
                                            return;
                                        }
                                        dataEntry.getValues().put(baseURI + "feature/" + feature.getId(),
                                                entry.getValue());
                                        FeatureInfo featInfo = new FeatureInfo(
                                                baseURI + "feature/" + feature.getId(),
                                                feature.getMeta().getTitles().stream().findFirst().get());
                                        featInfo.setCategory(Dataset.DescriptorCategory.PREDICTED);
                                        dataset.getFeatures().add(featInfo);
                                    });
                                });
                        dataset.setId(randomStringGenerator.nextString(20));
                        dataset.setTotalRows(dataset.getDataEntry().size());
                        dataset.setMeta(datasetMeta);
                        futureDataset.complete(DatasetFactory.mergeColumns(dataset, tempWithDependentFeatures));
                    } catch (Exception ex) {
                        futureDataset.completeExceptionally(ex);
                    }
                    break;
                case 400:
                    String message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new BadRequestException(message));
                    break;
                case 404:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new NotFoundException(message));
                    break;
                case 500:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new InternalServerErrorException(message));
                    break;
                default:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new InternalServerErrorException(message));
                }
            } catch (IOException | UnsupportedOperationException ex) {
                futureDataset.completeExceptionally(ex);
            }
        }

        @Override
        public void failed(final Exception ex) {
            futureMap.remove(taskId);
            futureDataset.completeExceptionally(new InternalServerErrorException(ex));
        }

        @Override
        public void cancelled() {
            futureMap.remove(taskId);
            futureDataset.cancel(true);
        }
    });
    serializer.write(predictionRequest, out);
    try {
        out.close();
    } catch (IOException ex) {
        futureDataset.completeExceptionally(ex);
    }
    futureMap.put(taskId, futureResponse);
    return futureDataset;
}

From source file:hu.sztaki.lpds.pgportal.services.dspace.LNIclient.java

/**
 * Starts a two-stage WebDAV PUT operation, which gives the caller
 * an OutputStream on which to write the body.  The expected
 * sequence is:  call startPut(), write the body, close the stream,
 * and then call finishPut() to obtain the Handle of the
 * newly-created resource.
 * <p>
 * The actual PUT method is executed in a separate thread since it
 * has to read data from the pipe attached to the returned
 * OutputStream, and this thread must write to that OutputStream.
 * <p>
 * Since the LNI only submits Items, the target must be a
 * collection.
 *
 * @param collection Handle of the target, i.e. collection into which Item is submitted
 * @param type Package type, actually the name of package ingester plugin on the server.
 * @param options other HTTP options which are passed to package ingester plugin
 * @return an OutputStream on which the request body is written, it then MUST be closed.
 */
public OutputStream startPut(String collection, String type, NameValuePair options[])
        throws IOException, HttpException {
    PipedOutputStream out = new PipedOutputStream();
    PipedInputStream in = new PipedInputStream(out);
    startPutInternal(collection, type, options, in);
    lastPutThread = new Thread(this);
    lastPutThread.start();
    return out;
}
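
Per the javadoc above, a caller follows the sequence startPut(), write the body, close the stream, then finishPut(). A hypothetical caller might look like the sketch below; the collection Handle and the "METS" package type are illustrative values, and finishPut() returning the new Handle as a String is an assumption drawn from the description rather than from LNIclient's actual signature.

import java.io.IOException;
import java.io.OutputStream;
import org.apache.commons.httpclient.HttpException;
import org.apache.commons.httpclient.NameValuePair;

public class LNIPutSketch {
    static String submit(LNIclient lni, byte[] packageBody) throws IOException, HttpException {
        OutputStream out = lni.startPut("123456789/1", // target collection Handle (illustrative)
                "METS", // package ingester plugin name (illustrative)
                new NameValuePair[0]);
        try {
            out.write(packageBody);
        } finally {
            out.close(); // MUST be closed so the piped PUT thread can complete
        }
        return lni.finishPut(); // Handle of the newly-created Item (assumed return type)
    }
}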

From source file:jp.ikedam.jenkins.plugins.viewcopy_builder.ViewcopyBuilder.java

/**
 * Returns an InputStream for an XML document.
 *
 * @param doc the document to serialize
 * @return a stream from which the serialized document can be read
 * @throws TransformerException
 * @throws IOException
 */
private InputStream getInputStreamFromDocument(Document doc) throws TransformerException, IOException {
    TransformerFactory tfactory = TransformerFactory.newInstance();
    Transformer transformer = tfactory.newTransformer();
    PipedOutputStream sout = new PipedOutputStream();
    PipedInputStream sin = new PipedInputStream(sout);
    // As in getViewConfigXmlDocument(), this same-thread write only works while
    // the serialized document fits in the pipe's default 1024-byte buffer.
    transformer.transform(new DOMSource(doc), new StreamResult(sout));
    sout.close();

    return sin;
}

From source file:org.talend.dataprep.transformation.service.TransformationService.java

/**
 * Compute the given aggregation.
 *
 * @param rawParams the aggregation rawParams as body rawParams.
 */
// @formatter:off
@RequestMapping(value = "/aggregate", method = POST, produces = APPLICATION_JSON_VALUE, consumes = APPLICATION_JSON_VALUE)
@ApiOperation(value = "Compute the aggregation according to the request body rawParams", consumes = APPLICATION_JSON_VALUE)
@VolumeMetered
public AggregationResult aggregate(
        @ApiParam(value = "The aggregation rawParams in json") @RequestBody final String rawParams) {
    // @formatter:on

    // parse the aggregation parameters
    final AggregationParameters parameters;
    try {
        parameters = mapper.readerFor(AggregationParameters.class).readValue(rawParams);
        LOG.debug("Aggregation requested {}", parameters);
    } catch (IOException e) {
        throw new TDPException(CommonErrorCodes.BAD_AGGREGATION_PARAMETERS, e);
    }

    InputStream contentToAggregate;

    // get the content of the preparation (internal call with piped streams)
    if (StringUtils.isNotBlank(parameters.getPreparationId())) {
        try {
            PipedOutputStream temp = new PipedOutputStream();
            contentToAggregate = new PipedInputStream(temp);

            // because of piped streams, processing must be asynchronous
            Runnable r = () -> {
                try {
                    final ExportParameters exportParameters = new ExportParameters();
                    exportParameters.setPreparationId(parameters.getPreparationId());
                    exportParameters.setDatasetId(parameters.getDatasetId());
                    if (parameters.getFilter() != null) {
                        exportParameters.setFilter(mapper.readTree(parameters.getFilter()));
                    }
                    exportParameters.setExportType(JSON);
                    exportParameters.setStepId(parameters.getStepId());

                    final StreamingResponseBody body = executeSampleExportStrategy(exportParameters);
                    body.writeTo(temp);
                } catch (IOException e) {
                    throw new TDPException(CommonErrorCodes.UNABLE_TO_AGGREGATE, e);
                }
            };
            executor.execute(r);
        } catch (IOException e) {
            throw new TDPException(CommonErrorCodes.UNABLE_TO_AGGREGATE, e);
        }
    } else {
        final DataSetGet dataSetGet = context.getBean(DataSetGet.class, parameters.getDatasetId(), false, true);
        contentToAggregate = dataSetGet.execute();
    }

    // apply the aggregation
    try (JsonParser parser = mapper.getFactory().createParser(contentToAggregate)) {
        final DataSet dataSet = mapper.readerFor(DataSet.class).readValue(parser);
        return aggregationService.aggregate(parameters, dataSet);
    } catch (IOException e) {
        throw new TDPException(CommonErrorCodes.UNABLE_TO_PARSE_JSON, e);
    } finally {
        // don't forget to release the connection
        if (contentToAggregate != null) {
            try {
                contentToAggregate.close();
            } catch (IOException e) {
                LOG.warn("Could not close dataset input stream while aggregating", e);
            }
        }
    }
}
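
The inline comment above ("because of piped streams, processing must be asynchronous") names the pattern that runs through most examples on this page: one end of the pipe must be driven from another thread, or any write that overflows the pipe's buffer blocks forever. A minimal stand-alone sketch of that shape, with all names illustrative rather than taken from the Talend code:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

public class AsyncPipeSketch {
    public static void main(String[] args) throws IOException {
        PipedOutputStream out = new PipedOutputStream();
        PipedInputStream in = new PipedInputStream(out);

        // Producer runs on its own thread so the consumer below can drain the pipe.
        Thread producer = new Thread(() -> {
            try {
                for (int i = 0; i < 100000; i++) {
                    out.write("row\n".getBytes());
                }
                out.close(); // signals end-of-stream to the reader
            } catch (IOException e) {
                e.printStackTrace();
            }
        });
        producer.start();

        // Consumer drains on the calling thread until end-of-stream.
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        byte[] buf = new byte[8192];
        int n;
        while ((n = in.read(buf)) != -1) {
            sink.write(buf, 0, n);
        }
        System.out.println(sink.size() + " bytes received");
    }
}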

From source file:eu.stratosphere.pact.runtime.task.util.OutputEmitterTest.java

@Test
public void testWrongKeyClass() {

    // Comparator keyed on DoubleValue, while the record written below
    // carries an IntValue in that field.
    @SuppressWarnings("unchecked")
    final TypeComparator<Record> doubleComp = new RecordComparatorFactory(new int[] { 0 },
            new Class[] { DoubleValue.class }).createComparator();
    final ChannelSelector<SerializationDelegate<Record>> oe1 = new OutputEmitter<Record>(
            ShipStrategyType.PARTITION_HASH, doubleComp);
    final SerializationDelegate<Record> delegate = new SerializationDelegate<Record>(
            new RecordSerializerFactory().getSerializer());

    PipedInputStream pipedInput = new PipedInputStream(1024 * 1024);
    DataInputStream in = new DataInputStream(pipedInput);
    DataOutputStream out;
    Record rec = null;

    try {
        out = new DataOutputStream(new PipedOutputStream(pipedInput));

        rec = new Record(1);
        rec.setField(0, new IntValue());

        rec.write(out);
        rec = new Record();
        rec.read(in);

    } catch (IOException e) {
        fail("Test erroneous");
    }

    try {
        delegate.setInstance(rec);
        oe1.selectChannels(delegate, 100);
    } catch (DeserializationException re) {
        return;
    }
    Assert.fail("Expected a NullKeyFieldException.");
}