Example usage for java.io PipedOutputStream close

Introduction

On this page you can find example usages of java.io.PipedOutputStream.close(), collected from real-world source files.

Prototype

public void close() throws IOException 

Document

Closes this piped output stream and releases any system resources associated with this stream.
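
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) showing why close() matters for a piped pair: the reading side only reaches end-of-stream once the writing side closes the PipedOutputStream.

import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

public class PipedCloseSketch {
    public static void main(String[] args) throws IOException, InterruptedException {
        PipedInputStream in = new PipedInputStream();
        PipedOutputStream out = new PipedOutputStream(in);

        // the producer runs in its own thread; using both ends from one thread risks deadlock
        Thread writer = new Thread(() -> {
            try {
                out.write("hello, pipe".getBytes());
            } catch (IOException e) {
                // ignored in this sketch
            } finally {
                try {
                    out.close(); // without this, in.read() below would block forever
                } catch (IOException e) {
                }
            }
        });
        writer.start();

        int b;
        while ((b = in.read()) != -1) { // read() returns -1 once the writer has closed
            System.out.print((char) b);
        }
        in.close();
        writer.join();
    }
}

The project examples below apply the same discipline: whichever thread produces the data closes the PipedOutputStream when it is done, often in a finally block, so the consuming PipedInputStream can reach end-of-stream.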

Usage

From source file:org.onebusaway.nyc.webapp.actions.admin.ReportingAction.java

public String submit() throws Exception {
    Session session = null;
    Connection connection = null;
    Statement statement = null;
    ResultSet rs = null;
    try {
        session = sessionFactory.openSession();
        connection = getConnectionFromSession(session);
        connection.setReadOnly(true);

        statement = connection.createStatement();
        rs = statement.executeQuery(query);

    } catch (Exception e) {
        // make sure everything is closed if an exception was thrown
        try {
            rs.close();
        } catch (Exception ex) {
        }
        try {
            statement.close();
        } catch (Exception ex) {
        }
        try {
            connection.close();
        } catch (Exception ex) {
        }
        try {
            session.close();
        } catch (Exception ex) {
        }

        reportError = e.getMessage();
        // not really "success", but we'll use the same template with the error displayed
        return SUCCESS;
    }

    // final so the output generator thread can close it
    final Session finalSession = session;
    final Connection finalConnection = connection;
    final Statement finalStatement = statement;
    final ResultSet finalRS = rs;

    final PipedInputStream pipedInputStream = new PipedInputStream();
    final PipedOutputStream pipedOutputStream = new PipedOutputStream(pipedInputStream);

    executorService.execute(new Runnable() {

        @Override
        public void run() {
            try {
                // column labels
                ResultSetMetaData metaData = finalRS.getMetaData();
                int columnCount = metaData.getColumnCount();

                for (int i = 0; i < columnCount; i++) {
                    String columnName = metaData.getColumnName(i + 1);
                    byte[] bytes = columnName.getBytes();

                    if (i > 0)
                        pipedOutputStream.write(columnDelimiter);

                    pipedOutputStream.write(bytes);
                }

                pipedOutputStream.write(newline);

                // column values
                while (finalRS.next()) {
                    for (int i = 0; i < columnCount; i++) {
                        String value = finalRS.getString(i + 1);

                        if (value == null)
                            value = "null";
                        else {
                            // remove returns
                            value = value.replaceAll("\n|\r", "");
                        }

                        byte[] valueBytes = value.getBytes();

                        if (i > 0)
                            pipedOutputStream.write(columnDelimiter);

                        pipedOutputStream.write(valueBytes);
                    }

                    pipedOutputStream.write(newline);
                }
            } catch (Exception e) {
            } finally {
                try {
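                    // closing the pipe delivers end-of-stream to the reader of pipedInputStream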
                    pipedOutputStream.close();
                } catch (IOException e) {
                }
                try {
                    finalRS.close();
                } catch (SQLException e) {
                }
                try {
                    finalStatement.close();
                } catch (SQLException e) {
                }
                try {
                    finalConnection.close();
                } catch (SQLException e) {
                }
                try {
                    finalSession.close();
                } catch (Exception e) {
                }
            }
        }
    });

    // the input stream will get populated by the piped output stream
    inputStream = pipedInputStream;
    return "download";
}

From source file:hudson.scm.subversion.CheckoutUpdater.java

@Override
public UpdateTask createTask() {
    return new UpdateTask() {
        private static final long serialVersionUID = 8349986526712487762L;

        @Override
        public List<External> perform() throws IOException, InterruptedException {
            final SVNUpdateClient svnuc = clientManager.getUpdateClient();
            final List<External> externals = new ArrayList<External>(); // store discovered externals to here

            listener.getLogger().println("Cleaning local Directory " + location.getLocalDir());
            Util.deleteContentsRecursive(new File(ws, location.getLocalDir()));

            // buffer the output by a separate thread so that the update operation
            // won't be blocked by the remoting of the data
            PipedOutputStream pos = new PipedOutputStream();
            StreamCopyThread sct = new StreamCopyThread("svn log copier", new PipedInputStream(pos),
                    listener.getLogger());
            sct.start();

            try {

                SVNRevision r = getRevision(location);

                String revisionName = r.getDate() != null ? fmt.format(r.getDate()) : r.toString();

                listener.getLogger()
                        .println("Checking out " + location.remote + " at revision " + revisionName);

                File local = new File(ws, location.getLocalDir());
                SubversionUpdateEventHandler eventHandler = new SubversionUpdateEventHandler(
                        new PrintStream(pos), externals, local, location.getLocalDir());
                svnuc.setEventHandler(eventHandler);
                svnuc.setExternalsHandler(eventHandler);
                svnuc.setIgnoreExternals(location.isIgnoreExternalsOption());

                SVNDepth svnDepth = getSvnDepth(location.getDepthOption());
                svnuc.doCheckout(location.getSVNURL(), local.getCanonicalFile(), SVNRevision.HEAD, r, svnDepth,
                        true);
            } catch (SVNCancelException e) {
                if (isAuthenticationFailedError(e)) {
                    e.printStackTrace(listener.error("Failed to check out " + location.remote));
                    return null;
                } else {
                    listener.error("Subversion checkout has been canceled");
                    throw (InterruptedException) new InterruptedException().initCause(e);
                }
            } catch (SVNException e) {
                e.printStackTrace(listener.error("Failed to check out " + location.remote));
                throw new IOException("Failed to check out " + location.remote, e);
            } finally {
                try {
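                    // closing the pipe signals end-of-stream so the copy thread can drain and exit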
                    pos.close();
                } finally {
                    try {
                        sct.join(); // wait for all data to be piped.
                    } catch (InterruptedException e) {
                        throw new IOException2("interrupted", e);
                    }
                }
            }

            return externals;
        }
    };
}

From source file:com.kdmanalytics.toif.assimilator.Assimilator.java

/**
 * process the toif files.
 * 
 * @param toifFiles
 *          list of toif files to process.
 * @param smallestBigNumber2
 *          the smallest number from the end of the long scale. used for the bnodes at the end of
 *          the repository
 * @param blacklistPath
 *          string that is the name of the directory of the project root.
 * @throws IOException
 * @throws ToifException
 */
private void processToifFiles(final List<File> toifFiles, Long id, Long smallestBigNumber2,
        String blacklistPath) throws IOException, ToifException {
    PipedInputStream toifIn = new PipedInputStream();
    final PipedOutputStream toifOut = new PipedOutputStream(toifIn);

    // final ToifMerger toifMerger = getToifMerger(new PrintWriter(toifOut),
    // id, smallestBigNumber2, blacklistPath);

    PrintWriter w = new PrintWriter(toifOut);
    final ToifMerger toifMerger = getToifMerger(w, id, smallestBigNumber2, blacklistPath);
    new Thread(new Runnable() {

        @Override
        public void run() {
            Long offset = mergeToif(toifMerger, toifFiles);

            setOffset(offset);
            try {
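                // close the pipe so streamStatementsToRepo(toifIn) sees end-of-stream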
                toifOut.close();
            } catch (IOException e) {
                LOG.error("", e);
            }
        }

    }).start();

    streamStatementsToRepo(toifIn);
}

From source file:com.kdmanalytics.toif.assimilator.Assimilator.java

/**
 * process the tkdm files.
 * 
 * @param tkdmFiles
 *          the list of tkdm files to process.
 * @return the RepositoryMerger used to merge the tkdm files
 * @throws IOException
 * @throws ToifException
 */
private RepositoryMerger processTkdmFiles(final List<File> tkdmFiles) throws IOException, ToifException {
    final PipedInputStream in = new PipedInputStream();
    final PipedOutputStream out = new PipedOutputStream(in);

    String assemblyName = "Assembly";
    int position = outputLocation.getName().lastIndexOf(".");
    if (position != -1) {
        assemblyName = outputLocation.getName().substring(0, position);
    }

    final RepositoryMerger kdmMerger = getTkdmMerger(new PrintWriter(out), assemblyName);
    new Thread(new Runnable() {

        @Override
        public void run() {
            mergeTkdm(kdmMerger, tkdmFiles);
            kdmMerger.close();
            try {
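                // close the pipe only after the merger has flushed everything into it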
                out.close();
            } catch (IOException e) {
                LOG.error("", e);
                //e.printStackTrace();
            }
        }
    }).start();

    streamStatementsToRepo(in);
    return kdmMerger;
}

From source file:com.kdmanalytics.toif.assimilator.Assimilator.java

private void processKdmXmlFile(final List<File> kdmFiles)
        throws FileNotFoundException, IOException, RepositoryException, ToifException {
    if (debug) {
        LOG.debug("processing kdm file...");
        //System.err.println("processing kdm file...");
    }

    PipedInputStream in = new PipedInputStream();
    final PipedOutputStream out = new PipedOutputStream(in);
    final ThreadStatus status = new ThreadStatus();

    Thread t = new Thread(new Runnable() {

        @Override
        public void run() {
            KdmXmlHandler handler = null;
            try {
                if (kdmFiles.size() > 1) {
                    final String msg = "There should only be one .kdm file.";
                    LOG.error(msg);
                    throw new ToifException(msg);
                } else if (kdmFiles.size() == 1) {
                    File kdmFile = kdmFiles.get(0); // get the head of the list.
                    handler = load(kdmFile, out);
                }
                out.flush();
                out.close();

                if (handler == null) {
                    return;
                }
                setNextId(handler.getNextId());
                setSmallestBigNumber(handler.getSmallestBigNumber());
                // increase
            } catch (IOException e) {
                final String msg = "IO exception whilst processing kdm file. "
                        + "Possibly an existing kdm file is in your input path!";

                LOG.error(msg, e);
                status.exception = new ToifException(msg, e);
            } catch (RepositoryException e) {
                final String msg = "Repository Exception whilst processing kdm file. "
                        + "Possibly an existing kdm file is in your input path!";

                LOG.error(msg, e);
                status.exception = new ToifException(msg, e);
            } catch (ToifException e) {
                // RJF final String msg =
                // "Processing Exception whilst processing kdm file. "
                // + ". Possibly that input file is invalid XML!";

                // LOG.error(msg, e);
                status.exception = e;
            } finally {
                if (out != null)
                    try {
                        out.close();
                    } catch (IOException e) {
                        // Just leave it alone
                        LOG.error("unable to close stream");
                    }
            }
        }
    });

    // ---------------------------------------------------------
    // Unable to change logic within the short time frame given so
    // adding a means to catch unknown exceptions in thread
    // ----------------------------------------------------------
    Thread.UncaughtExceptionHandler tueh = new Thread.UncaughtExceptionHandler() {

        public void uncaughtException(Thread th, Throwable ex) {
            LOG.error("Uncaught exception: " + ex);
            status.exception = (Exception) ex;
        }
    };

    t.setUncaughtExceptionHandler(tueh);
    t.start();

    streamStatementsToRepo(in);
    try {
        t.join();

        // Check if we encountered an exception during processing and
        // rethrow it if we have one
        if (status.exception != null) {
            // Leave alone if already a ToifException
            if (status.exception instanceof ToifException)
                throw (ToifException) status.exception;
            else
                throw new ToifException(status.exception);

        }
    } catch (InterruptedException e) {
        LOG.error("Interrupted");
        throw new ToifException("Interrupted");
    }
}

From source file:com.dtolabs.rundeck.core.execution.impl.jsch.JschNodeExecutor.java

public NodeExecutorResult executeCommand(final ExecutionContext context, final String[] command,
        final INodeEntry node) {
    if (null == node.getHostname() || null == node.extractHostname()) {
        return NodeExecutorResultImpl.createFailure(StepFailureReason.ConfigurationFailure,
                "Hostname must be set to connect to remote node '" + node.getNodename() + "'", node);
    }

    final ExecutionListener listener = context.getExecutionListener();
    final Project project = new Project();
    AntSupport.addAntBuildListener(listener, project);

    boolean success = false;
    final ExtSSHExec sshexec;
    // perform jsch ssh command
    final NodeSSHConnectionInfo nodeAuthentication = new NodeSSHConnectionInfo(node, framework, context);
    final int timeout = nodeAuthentication.getSSHTimeout();
    try {

        sshexec = SSHTaskBuilder.build(node, command, project, context.getDataContext(), nodeAuthentication,
                context.getLoglevel(), listener);
    } catch (SSHTaskBuilder.BuilderException e) {
        return NodeExecutorResultImpl.createFailure(StepFailureReason.ConfigurationFailure, e.getMessage(),
                node);
    }

    //Sudo support

    final ExecutorService executor = Executors.newSingleThreadExecutor(new ThreadFactory() {
        public Thread newThread(Runnable r) {
            return new Thread(null, r,
                    "SudoResponder " + node.getNodename() + ": " + System.currentTimeMillis());
        }
    });

    final Future<ResponderTask.ResponderResult> responderFuture;
    final SudoResponder sudoResponder = SudoResponder.create(node, framework, context);
    Runnable responderCleanup = null;
    if (sudoResponder.isSudoEnabled() && sudoResponder.matchesCommandPattern(command[0])) {
        final DisconnectResultHandler resultHandler = new DisconnectResultHandler();

        //configure two piped i/o stream pairs, to connect to the input/output of the SSH connection
        final PipedInputStream responderInput = new PipedInputStream();
        final PipedOutputStream responderOutput = new PipedOutputStream();
        final PipedInputStream jschInput = new PipedInputStream();
        //lead pipe allows connected inputstream to close and not hang the writer to this stream
        final PipedOutputStream jschOutput = new LeadPipeOutputStream();
        try {
            responderInput.connect(jschOutput);
            jschInput.connect(responderOutput);
        } catch (IOException e) {
            return NodeExecutorResultImpl.createFailure(StepFailureReason.IOFailure, e.getMessage(), node);
        }

        //first sudo prompt responder
        ResponderTask responder = new ResponderTask(sudoResponder, responderInput, responderOutput,
                resultHandler);

        /**
         * Callable will be executed by the ExecutorService
         */
        final Callable<ResponderTask.ResponderResult> responderResultCallable;

        //if 2nd responder
        final SudoResponder sudoResponder2 = SudoResponder.create(node, framework, context, SUDO2_OPT_PREFIX,
                DEFAULT_SUDO2_PASSWORD_OPTION, DEFAULT_SUDO2_COMMAND_PATTERN);
        if (sudoResponder2.isSudoEnabled()
                && sudoResponder2.matchesCommandPattern(CLIUtils.generateArgline(null, command, false))) {
            logger.debug("Enable second sudo responder");

            sudoResponder2.setDescription("Second " + SudoResponder.DEFAULT_DESCRIPTION);
            sudoResponder.setDescription("First " + SudoResponder.DEFAULT_DESCRIPTION);

            //sequence of the first then the second sudo responder
            responderResultCallable = responder.createSequence(sudoResponder2);
        } else {
            responderResultCallable = responder;
        }

        //set up SSH execution
        sshexec.setAllocatePty(true);
        sshexec.setInputStream(jschInput);
        sshexec.setSecondaryStream(jschOutput);
        sshexec.setDisconnectHolder(resultHandler);

        responderFuture = executor.submit(responderResultCallable);
        //close streams after responder is finished
        responderCleanup = new Runnable() {
            public void run() {
                logger.debug("SudoResponder shutting down...");
                try {
                    responderInput.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                try {
                    responderOutput.flush();
                    responderOutput.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                //executor pool shutdown
                executor.shutdownNow();
            }
        };
        executor.submit(responderCleanup);
    } else {
        responderFuture = null;
    }
    if (null != context.getExecutionListener()) {
        context.getExecutionListener().log(3, "Starting SSH Connection: " + nodeAuthentication.getUsername()
                + "@" + node.getHostname() + " (" + node.getNodename() + ")");
    }
    String errormsg = null;
    FailureReason failureReason = null;
    try {
        sshexec.execute();
        success = true;
    } catch (BuildException e) {
        final ExtractFailure extractJschFailure = extractFailure(e, node, timeout, framework);
        errormsg = extractJschFailure.getErrormsg();
        failureReason = extractJschFailure.getReason();
        context.getExecutionListener().log(0, errormsg);
    }
    if (null != responderCleanup) {
        responderCleanup.run();
    }
    shutdownAndAwaitTermination(executor);
    if (null != responderFuture) {
        try {
            logger.debug("Waiting 5 seconds for responder future result");
            final ResponderTask.ResponderResult result = responderFuture.get(5, TimeUnit.SECONDS);
            logger.debug("Responder result: " + result);
            if (!result.isSuccess() && !result.isInterrupted()) {
                context.getExecutionListener().log(0,
                        result.getResponder().toString() + " failed: " + result.getFailureReason());
            }
        } catch (InterruptedException e) {
            //ignore
        } catch (java.util.concurrent.ExecutionException e) {
            e.printStackTrace();
        } catch (TimeoutException e) {
            //ignore
        }
    }
    final int resultCode = sshexec.getExitStatus();

    if (success) {
        return NodeExecutorResultImpl.createSuccess(node);
    } else {
        return NodeExecutorResultImpl.createFailure(failureReason, errormsg, node, resultCode);
    }
}

From source file:de.resol.vbus.LiveInputStreamTest.java

@Test
public void testReadHeader() throws Exception {
    byte[] refPacketBuffer1 = Hex.decodeHex(
            "aa362335331034430d2a0004080c00671014181c00272024282c00673034383c00274044484c00675054585c00276064686c00677074787c00270004080c0f581014181c0f182024282c0f583034383c0f184044484c0f58"
                    .toCharArray());
    byte[] refDgramBuffer1 = Hex.decodeHex("aa362335332034433353300332630851".toCharArray());
    byte[] refTgramBuffer1 = Hex
            .decodeHex("aa2211443330772e000c1824303c48000354606c7804101c70472834404c5864707f6c".toCharArray());
    byte[] refZeroBuffer1 = Hex.decodeHex("00000000000000000000000000000000".toCharArray());
    byte[] refMsbBuffer1 = Hex.decodeHex("80808080808080808080808080808080".toCharArray());

    String refPacketId1 = "13_2336_3335_10_4334";
    String refDgramId1 = "13_2336_3335_20_4334_0000";
    String refTgramId1 = "13_1122_3344_30_77";

    PipedInputStream refIs1 = new PipedInputStream(2048);
    PipedOutputStream refOs1 = new PipedOutputStream(refIs1);
    int refChannel1 = 0x13;

    LiveInputStream testIs = new LiveInputStream(refIs1, refChannel1);

    refOs1.write(refPacketBuffer1);

    assertEquals(refPacketId1, testIs.readHeader().getId());

    refOs1.write(refDgramBuffer1);
    refOs1.write(refTgramBuffer1);

    assertEquals(refDgramId1, testIs.readHeader().getId());
    assertEquals(refTgramId1, testIs.readHeader().getId());

    // write a partial header (before protocol version)
    refOs1.write(refTgramBuffer1);
    refOs1.write(refDgramBuffer1, 0, 5);

    assertEquals(refTgramId1, testIs.readHeader().getId());

    refOs1.write(refDgramBuffer1, 5, 11);

    assertEquals(refDgramId1, testIs.readHeader().getId());

    // write a broken header (without sync byte)
    refOs1.write(refDgramBuffer1, 1, 15);
    refOs1.write(refTgramBuffer1);

    assertEquals(refTgramId1, testIs.readHeader().getId());

    // write unknown version
    refOs1.write(refDgramBuffer1, 0, 5);
    refOs1.write(0x05);
    refOs1.write(refTgramBuffer1);

    assertEquals(refTgramId1, testIs.readHeader().getId());

    // write partial packet (incomplete header)
    refOs1.write(refDgramBuffer1);
    refOs1.write(refPacketBuffer1, 0, 9);

    assertEquals(refDgramId1, testIs.readHeader().getId());

    refOs1.write(refPacketBuffer1, 9, refPacketBuffer1.length - 9);

    assertEquals(refPacketId1, testIs.readHeader().getId());

    // write defect packet (header msb)
    refOs1.write(refPacketBuffer1, 0, 9);
    refOs1.write(refMsbBuffer1, 0, 1);
    refOs1.write(refDgramBuffer1);

    assertEquals(refDgramId1, testIs.readHeader().getId());

    // write defect packet (header checksum)
    refOs1.write(refPacketBuffer1, 0, 9);
    refOs1.write(refZeroBuffer1, 0, 1);
    refOs1.write(refDgramBuffer1);

    assertEquals(refDgramId1, testIs.readHeader().getId());

    // write partial packet (incomplete frame data)
    refOs1.write(refDgramBuffer1);
    refOs1.write(refPacketBuffer1, 0, 15);

    assertEquals(refDgramId1, testIs.readHeader().getId());

    refOs1.write(refPacketBuffer1, 15, refPacketBuffer1.length - 15);

    assertEquals(refPacketId1, testIs.readHeader().getId());

    // write defect packet (frame data msb)
    refOs1.write(refPacketBuffer1, 0, refPacketBuffer1.length - 1);
    refOs1.write(refMsbBuffer1, 0, 1);
    refOs1.write(refDgramBuffer1);

    assertEquals(refDgramId1, testIs.readHeader().getId());

    // write partial datagram
    refOs1.write(refTgramBuffer1);
    refOs1.write(refDgramBuffer1, 0, 15);

    assertEquals(refTgramId1, testIs.readHeader().getId());

    refOs1.write(refDgramBuffer1, 15, refDgramBuffer1.length - 15);

    assertEquals(refDgramId1, testIs.readHeader().getId());

    // write defect datagram
    refOs1.write(refDgramBuffer1, 0, 15);
    refOs1.write(refMsbBuffer1, 0, 1);
    refOs1.write(refTgramBuffer1);

    assertEquals(refTgramId1, testIs.readHeader().getId());

    // write partial telegram (incomplete header)
    refOs1.write(refDgramBuffer1);
    refOs1.write(refTgramBuffer1, 0, 7);

    assertEquals(refDgramId1, testIs.readHeader().getId());

    refOs1.write(refTgramBuffer1, 7, refTgramBuffer1.length - 7);

    assertEquals(refTgramId1, testIs.readHeader().getId());

    // write defect telegram (header msb)
    refOs1.write(refTgramBuffer1, 0, 7);
    refOs1.write(refMsbBuffer1, 0, 1);
    refOs1.write(refDgramBuffer1);

    assertEquals(refDgramId1, testIs.readHeader().getId());

    // write defect telegram (header checksum)
    refOs1.write(refTgramBuffer1, 0, 7);
    refOs1.write(refZeroBuffer1, 0, 1);
    refOs1.write(refDgramBuffer1);

    assertEquals(refDgramId1, testIs.readHeader().getId());

    // write partial telegram (incomplete frame data)
    refOs1.write(refDgramBuffer1);
    refOs1.write(refTgramBuffer1, 0, 15);

    assertEquals(refDgramId1, testIs.readHeader().getId());

    refOs1.write(refTgramBuffer1, 15, refTgramBuffer1.length - 15);

    assertEquals(refTgramId1, testIs.readHeader().getId());

    // write defect telegram (frame data msb)
    refOs1.write(refTgramBuffer1, 0, refTgramBuffer1.length - 1);
    refOs1.write(refMsbBuffer1, 0, 1);
    refOs1.write(refDgramBuffer1);

    assertEquals(refDgramId1, testIs.readHeader().getId());

    // closing the pipe makes readHeader() return null (end of stream)
    refOs1.close();

    assertEquals(null, testIs.readHeader());
}

From source file:org.jaqpot.core.service.client.jpdi.JPDIClientImpl.java

@Override
public Future<Model> train(Dataset dataset, Algorithm algorithm, Map<String, Object> parameters,
        String predictionFeature, MetaInfo modelMeta, String taskId) {

    CompletableFuture<Model> futureModel = new CompletableFuture<>();

    TrainingRequest trainingRequest = new TrainingRequest();
    trainingRequest.setDataset(dataset);
    trainingRequest.setParameters(parameters);
    trainingRequest.setPredictionFeature(predictionFeature);
    //        String trainingRequestString = serializer.write(trainingRequest);

    final HttpPost request = new HttpPost(algorithm.getTrainingService());

    PipedOutputStream out = new PipedOutputStream();
    PipedInputStream in;
    try {
        in = new PipedInputStream(out);
    } catch (IOException ex) {
        futureModel.completeExceptionally(ex);
        return futureModel;
    }
    InputStreamEntity entity = new InputStreamEntity(in, ContentType.APPLICATION_JSON);
    entity.setChunked(true);

    request.setEntity(entity);
    request.addHeader("Accept", "application/json");

    Future futureResponse = client.execute(request, new FutureCallback<HttpResponse>() {

        @Override
        public void completed(final HttpResponse response) {
            futureMap.remove(taskId);
            int status = response.getStatusLine().getStatusCode();
            try {
                InputStream responseStream = response.getEntity().getContent();

                switch (status) {
                case 200:
                case 201:
                    TrainingResponse trainingResponse = serializer.parse(responseStream,
                            TrainingResponse.class);
                    Model model = new Model();
                    model.setId(randomStringGenerator.nextString(20));
                    model.setActualModel(trainingResponse.getRawModel());
                    model.setPmmlModel(trainingResponse.getPmmlModel());
                    model.setAdditionalInfo(trainingResponse.getAdditionalInfo());
                    model.setAlgorithm(algorithm);
                    model.setParameters(parameters);
                    model.setDatasetUri(dataset != null ? dataset.getDatasetURI() : null);

                    // Check if the model's independentFeatures exist in the dataset
                    List<String> filteredIndependentFeatures = new ArrayList<String>();

                    if (dataset != null && dataset.getFeatures() != null
                            && trainingResponse.getIndependentFeatures() != null)
                        for (String feature : trainingResponse.getIndependentFeatures()) {
                            for (FeatureInfo featureInfo : dataset.getFeatures()) {
                                if (feature.equals(featureInfo.getURI()))
                                    filteredIndependentFeatures.add(feature);
                            }
                        }

                    model.setIndependentFeatures(filteredIndependentFeatures);
                    model.setDependentFeatures(Arrays.asList(predictionFeature));
                    model.setMeta(modelMeta);

                    List<String> predictedFeatures = new ArrayList<>();
                    for (String featureTitle : trainingResponse.getPredictedFeatures()) {
                        Feature predictionFeatureResource = featureHandler.findByTitleAndSource(featureTitle,
                                "algorithm/" + algorithm.getId());
                        if (predictionFeatureResource == null) {
                            // Create the prediction features (POST /feature)
                            String predFeatID = randomStringGenerator.nextString(12);
                            predictionFeatureResource = new Feature();
                            predictionFeatureResource.setId(predFeatID);
                            predictionFeatureResource.setPredictorFor(predictionFeature);
                            predictionFeatureResource.setMeta(MetaInfoBuilder.builder()
                                    .addSources(
                                            /*messageBody.get("base_uri") + */"algorithm/" + algorithm.getId())
                                    .addComments("Feature created to hold predictions by algorithm with ID "
                                            + algorithm.getId())
                                    .addTitles(featureTitle).addSeeAlso(predictionFeature)
                                    .addCreators(algorithm.getMeta().getCreators()).build());
                            /* Create feature */
                            featureHandler.create(predictionFeatureResource);
                        }
                        predictedFeatures.add(baseURI + "feature/" + predictionFeatureResource.getId());
                    }
                    model.setPredictedFeatures(predictedFeatures);
                    futureModel.complete(model);
                    break;
                case 400:
                    String message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureModel.completeExceptionally(new BadRequestException(message));
                    break;
                case 500:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureModel.completeExceptionally(new InternalServerErrorException(message));
                    break;
                default:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureModel.completeExceptionally(new InternalServerErrorException(message));
                }
            } catch (IOException | UnsupportedOperationException ex) {
                futureModel.completeExceptionally(ex);
            }
        }

        @Override
        public void failed(final Exception ex) {
            futureMap.remove(taskId);
            futureModel.completeExceptionally(ex);
        }

        @Override
        public void cancelled() {
            futureMap.remove(taskId);
            futureModel.cancel(true);
        }

    });

    serializer.write(trainingRequest, out);
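    // closing the pipe terminates the chunked request entity so the async HTTP client can finish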
    try {
        out.close();
    } catch (IOException ex) {
        futureModel.completeExceptionally(ex);
    }

    futureMap.put(taskId, futureResponse);
    return futureModel;
}

From source file:org.jaqpot.core.service.client.jpdi.JPDIClientImpl.java

@Override
public Future<Dataset> predict(Dataset inputDataset, Model model, MetaInfo datasetMeta, String taskId) {

    CompletableFuture<Dataset> futureDataset = new CompletableFuture<>();

    Dataset dataset = DatasetFactory.copy(inputDataset);
    Dataset tempWithDependentFeatures = DatasetFactory.copy(dataset,
            new HashSet<>(model.getDependentFeatures()));

    dataset.getDataEntry().parallelStream().forEach(dataEntry -> {
        dataEntry.getValues().keySet().retainAll(model.getIndependentFeatures());
    });
    PredictionRequest predictionRequest = new PredictionRequest();
    predictionRequest.setDataset(dataset);
    predictionRequest.setRawModel(model.getActualModel());
    predictionRequest.setAdditionalInfo(model.getAdditionalInfo());

    final HttpPost request = new HttpPost(model.getAlgorithm().getPredictionService());
    request.addHeader("Accept", "application/json");
    request.addHeader("Content-Type", "application/json");

    PipedOutputStream out = new PipedOutputStream();
    PipedInputStream in;
    try {
        in = new PipedInputStream(out);
    } catch (IOException ex) {
        futureDataset.completeExceptionally(ex);
        return futureDataset;
    }
    request.setEntity(new InputStreamEntity(in, ContentType.APPLICATION_JSON));

    Future futureResponse = client.execute(request, new FutureCallback<HttpResponse>() {

        @Override
        public void completed(final HttpResponse response) {
            futureMap.remove(taskId);
            int status = response.getStatusLine().getStatusCode();
            try {
                InputStream responseStream = response.getEntity().getContent();

                switch (status) {
                case 200:
                case 201:
                    try {
                        PredictionResponse predictionResponse = serializer.parse(responseStream,
                                PredictionResponse.class);

                        List<LinkedHashMap<String, Object>> predictions = predictionResponse.getPredictions();
                        if (dataset.getDataEntry().isEmpty()) {
                            DatasetFactory.addEmptyRows(dataset, predictions.size());
                        }
                        List<Feature> features = featureHandler
                                .findBySource("algorithm/" + model.getAlgorithm().getId());
                        IntStream.range(0, dataset.getDataEntry().size())
                                // .parallel()
                                .forEach(i -> {
                                    Map<String, Object> row = predictions.get(i);
                                    DataEntry dataEntry = dataset.getDataEntry().get(i);
                                    if (model.getAlgorithm().getOntologicalClasses().contains("ot:Scaling")
                                            || model.getAlgorithm().getOntologicalClasses()
                                                    .contains("ot:Transformation")) {
                                        dataEntry.getValues().clear();
                                        dataset.getFeatures().clear();
                                    }
                                    row.entrySet().stream().forEach(entry -> {
                                        //                                                    Feature feature = featureHandler.findByTitleAndSource(entry.getKey(), "algorithm/" + model.getAlgorithm().getId());
                                        Feature feature = features.stream()
                                                .filter(f -> f.getMeta().getTitles().contains(entry.getKey()))
                                                .findFirst().orElse(null);
                                        if (feature == null) {
                                            return;
                                        }
                                        dataEntry.getValues().put(baseURI + "feature/" + feature.getId(),
                                                entry.getValue());
                                        FeatureInfo featInfo = new FeatureInfo(
                                                baseURI + "feature/" + feature.getId(),
                                                feature.getMeta().getTitles().stream().findFirst().get());
                                        featInfo.setCategory(Dataset.DescriptorCategory.PREDICTED);
                                        dataset.getFeatures().add(featInfo);
                                    });
                                });
                        dataset.setId(randomStringGenerator.nextString(20));
                        dataset.setTotalRows(dataset.getDataEntry().size());
                        dataset.setMeta(datasetMeta);
                        futureDataset.complete(DatasetFactory.mergeColumns(dataset, tempWithDependentFeatures));
                    } catch (Exception ex) {
                        futureDataset.completeExceptionally(ex);
                    }
                    break;
                case 400:
                    String message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new BadRequestException(message));
                    break;
                case 404:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new NotFoundException(message));
                    break;
                case 500:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new InternalServerErrorException(message));
                    break;
                default:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new InternalServerErrorException(message));
                }
            } catch (IOException | UnsupportedOperationException ex) {
                futureDataset.completeExceptionally(ex);
            }
        }

        @Override
        public void failed(final Exception ex) {
            futureMap.remove(taskId);
            futureDataset.completeExceptionally(new InternalServerErrorException(ex));
        }

        @Override
        public void cancelled() {
            futureMap.remove(taskId);
            futureDataset.cancel(true);
        }
    });
    serializer.write(predictionRequest, out);
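    // as in train(), closing the pipe ends the streamed request body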
    try {
        out.close();
    } catch (IOException ex) {
        futureDataset.completeExceptionally(ex);
    }
    futureMap.put(taskId, futureResponse);
    return futureDataset;
}

From source file:org.apache.james.mailrepository.jcr.JCRMailRepository.java

/**
 * Writes the message content to the <code>jcr:content/jcr:data</code>
 * binary property.
 * 
 * @param node
 *            mail node
 * @param message
 *            mail message
 * @throws MessagingException
 *             if a messaging error occurs
 * @throws RepositoryException
 *             if a repository error occurs
 * @throws IOException
 *             if an IO error occurs
 */
@SuppressWarnings("deprecation")
private void setMessage(Node node, final MimeMessage message) throws RepositoryException, IOException {
    try {
        node = node.getNode("jcr:content");
    } catch (PathNotFoundException e) {
        node = node.getProperty("jcr:content").getNode();
    }

    PipedInputStream input = new PipedInputStream();
    final PipedOutputStream output = new PipedOutputStream(input);
    new Thread() {
        public void run() {
            try {
                message.writeTo(output);
            } catch (Exception e) {
            } finally {
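                // close the pipe so node.setProperty("jcr:data", input) can reach end-of-stream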
                try {
                    output.close();
                } catch (IOException e) {
                }
            }
        }
    }.start();
    node.setProperty("jcr:data", input);
}