Example usage for javax.servlet.http HttpServletResponse SC_SERVICE_UNAVAILABLE

List of usage examples for javax.servlet.http HttpServletResponse SC_SERVICE_UNAVAILABLE

Introduction

In this page you can find the example usage for javax.servlet.http HttpServletResponse SC_SERVICE_UNAVAILABLE.

Prototype

int SC_SERVICE_UNAVAILABLE

To view the source code for javax.servlet.http HttpServletResponse SC_SERVICE_UNAVAILABLE, click the Source Link below.

Click Source Link

Document

Status code (503) indicating that the HTTP server is temporarily overloaded, and unable to handle the request.

Usage

From source file:org.b3log.solo.processor.FeedProcessor.java

/**
 * Tag articles RSS output.
 *
 * <p>The tag id is taken from the value of the first query-string parameter
 * (e.g. {@code ?oId=<tagId>}); a missing or malformed query string is answered
 * with 400 instead of leaking an unhandled runtime exception.</p>
 *
 * @param context the specified context
 * @throws IOException io exception
 */
@RequestProcessing(value = { "/tag-articles-rss.do" }, method = { HTTPRequestMethod.GET,
        HTTPRequestMethod.HEAD })
public void tagArticlesRSS(final HTTPRequestContext context) throws IOException {
    final HttpServletResponse response = context.getResponse();
    final HttpServletRequest request = context.getRequest();

    final RssRenderer renderer = new RssRenderer();
    context.setRenderer(renderer);

    final String queryString = request.getQueryString();
    if (Strings.isEmptyOrNull(queryString)) {
        response.sendError(HttpServletResponse.SC_BAD_REQUEST);
        return;
    }

    // Parse the tag id out of the first query parameter. Previously
    // split("=")[1] threw ArrayIndexOutOfBoundsException for requests such as
    // "?oId" or "?oId=" because the split yields fewer than two parts; those
    // malformed requests are now rejected with 400.
    final String[] firstParam = queryString.split("&")[0].split("=");
    if (firstParam.length < 2 || Strings.isEmptyOrNull(firstParam[1])) {
        response.sendError(HttpServletResponse.SC_BAD_REQUEST);
        return;
    }
    final String tagId = firstParam[1];

    final Channel channel = new Channel();
    try {
        final JSONObject tag = tagRepository.get(tagId);
        if (null == tag) {
            response.sendError(HttpServletResponse.SC_NOT_FOUND);
            return;
        }

        final String tagTitle = tag.getString(Tag.TAG_TITLE);

        final JSONObject preference = preferenceQueryService.getPreference();
        if (null == preference) {
            response.sendError(HttpServletResponse.SC_NOT_FOUND);
            return;
        }

        // Channel-level metadata derived from the blog preference.
        final String blogTitle = preference.getString(Preference.BLOG_TITLE);
        final String blogSubtitle = preference.getString(Preference.BLOG_SUBTITLE) + ", " + tagTitle;
        final String blogHost = preference.getString(Preference.BLOG_HOST);

        channel.setTitle(StringEscapeUtils.escapeXml(blogTitle));
        channel.setLastBuildDate(TimeZones.getTime(preference.getString(Preference.TIME_ZONE_ID)));
        channel.setLink("http://" + blogHost);
        channel.setAtomLink("http://" + blogHost + "/tag-articles-rss.do");
        channel.setGenerator("B3log Solo, ver " + SoloServletListener.VERSION);
        final String localeString = preference.getString(Preference.LOCALE_STRING);
        final String country = Locales.getCountry(localeString).toLowerCase();
        final String language = Locales.getLanguage(localeString).toLowerCase();
        channel.setLanguage(language + '-' + country);
        channel.setDescription(blogSubtitle);

        final JSONObject tagArticleResult = tagArticleRepository.getByTagId(tagId, 1, ENTRY_OUTPUT_CNT);
        final JSONArray tagArticleRelations = tagArticleResult.getJSONArray(Keys.RESULTS);
        if (0 == tagArticleRelations.length()) {
            response.sendError(HttpServletResponse.SC_NOT_FOUND);
            return;
        }

        // Collect the published, non-password-protected articles of the tag.
        final List<JSONObject> articles = new ArrayList<JSONObject>();
        for (int i = 0; i < tagArticleRelations.length(); i++) {
            final JSONObject tagArticleRelation = tagArticleRelations.getJSONObject(i);
            final String articleId = tagArticleRelation.getString(Article.ARTICLE + "_" + Keys.OBJECT_ID);
            final JSONObject article = articleRepository.get(articleId);
            if (article.getBoolean(Article.ARTICLE_IS_PUBLISHED) // Skips the unpublished article
                    && Strings.isEmptyOrNull(article.optString(Article.ARTICLE_VIEW_PWD))) { // Skips article with password
                articles.add(article);
            }
        }

        // With a single-user blog the author name is the same for every item,
        // so resolve it once up front.
        final boolean hasMultipleUsers = Users.getInstance().hasMultipleUsers();
        String authorName = "";

        if (!hasMultipleUsers && !articles.isEmpty()) {
            authorName = articleUtils.getAuthor(articles.get(0)).getString(User.USER_NAME);
        }

        final boolean isFullContent = "fullContent".equals(preference.getString(Preference.FEED_OUTPUT_MODE));

        for (int i = 0; i < articles.size(); i++) {
            final JSONObject article = articles.get(i);
            final Item item = new Item();
            channel.addItem(item);
            final String title = StringEscapeUtils.escapeXml(article.getString(Article.ARTICLE_TITLE));
            item.setTitle(title);
            final String description = isFullContent
                    ? StringEscapeUtils.escapeXml(article.getString(Article.ARTICLE_CONTENT))
                    : StringEscapeUtils.escapeXml(article.optString(Article.ARTICLE_ABSTRACT));
            item.setDescription(description);
            final Date pubDate = (Date) article.get(Article.ARTICLE_UPDATE_DATE);
            item.setPubDate(pubDate);
            final String link = "http://" + blogHost + article.getString(Article.ARTICLE_PERMALINK);
            item.setLink(link);
            item.setGUID(link);

            final String authorEmail = article.getString(Article.ARTICLE_AUTHOR_EMAIL);
            if (hasMultipleUsers) {
                authorName = StringEscapeUtils
                        .escapeXml(articleUtils.getAuthor(article).getString(User.USER_NAME));
            }

            item.setAuthor(authorEmail + "(" + authorName + ")");

            // One RSS category per tag of the article.
            final String tagsString = article.getString(Article.ARTICLE_TAGS_REF);
            final String[] tagStrings = tagsString.split(",");
            for (int j = 0; j < tagStrings.length; j++) {
                final org.b3log.solo.model.feed.rss.Category category = new org.b3log.solo.model.feed.rss.Category();
                item.addCatetory(category); // "addCatetory" is the (misspelled) API method name
                category.setTerm(tagStrings[j]);
            }
        }

        renderer.setContent(channel.toString());
    } catch (final Exception e) {
        LOGGER.log(Level.SEVERE, "Get tag article rss error", e);

        try {
            context.getResponse().sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
        } catch (final IOException ex) {
            throw new RuntimeException(ex);
        }
    }
}

From source file:org.alfresco.repo.web.scripts.RepositoryContainer.java

/**
 * Execute script within required level of transaction.
 *
 * <p>Scripts whose descriptor declares {@code none} run directly. All other
 * scripts run inside a retrying transaction; for readwrite transactions with a
 * positive buffer size, the request and response are buffered so the whole
 * exchange can be replayed if the transaction is retried.</p>
 *
 * @param script WebScript
 * @param scriptReq WebScriptRequest
 * @param scriptRes WebScriptResponse
 * @throws IOException rethrown unless the failure is recognised as a client
 *         disconnect (broken pipe / Tomcat ClientAbortException)
 */
protected void transactionedExecute(final WebScript script, final WebScriptRequest scriptReq,
        final WebScriptResponse scriptRes) throws IOException {
    try {
        final Description description = script.getDescription();
        if (description.getRequiredTransaction() == RequiredTransaction.none) {
            // No transaction demanded by the descriptor: run the script directly.
            script.execute(scriptReq, scriptRes);
        } else {
            final BufferedRequest bufferedReq;
            final BufferedResponse bufferedRes;
            RequiredTransactionParameters trxParams = description.getRequiredTransactionParameters();
            // Buffering only applies to readwrite transactions (which may be
            // retried); a buffer size of 0 disables it.
            if (trxParams.getCapability() == TransactionCapability.readwrite) {
                if (trxParams.getBufferSize() > 0) {
                    if (logger.isDebugEnabled())
                        logger.debug("Creating Transactional Response for ReadWrite transaction; buffersize="
                                + trxParams.getBufferSize());

                    // create buffered request and response that allow transaction retrying
                    bufferedReq = new BufferedRequest(scriptReq, streamFactory);
                    bufferedRes = new BufferedResponse(scriptRes, trxParams.getBufferSize());
                } else {
                    if (logger.isDebugEnabled())
                        logger.debug("Transactional Response bypassed for ReadWrite - buffersize=0");
                    bufferedReq = null;
                    bufferedRes = null;
                }
            } else {
                bufferedReq = null;
                bufferedRes = null;
            }

            // encapsulate script within transaction
            RetryingTransactionCallback<Object> work = new RetryingTransactionCallback<Object>() {
                public Object execute() throws Exception {
                    try {
                        if (logger.isDebugEnabled())
                            logger.debug("Begin retry transaction block: "
                                    + description.getRequiredTransaction() + ","
                                    + description.getRequiredTransactionParameters().getCapability());

                        if (bufferedRes == null) {
                            script.execute(scriptReq, scriptRes);
                        } else {
                            // Reset the request and response in case of a transaction retry
                            bufferedReq.reset();
                            bufferedRes.reset();
                            script.execute(bufferedReq, bufferedRes);
                        }
                    } catch (Exception e) {
                        if (logger.isDebugEnabled()) {
                            logger.debug("Transaction exception: " + description.getRequiredTransaction() + ": "
                                    + e.getMessage());
                            // Note: user transaction shouldn't be null, but just in case inside this exception handler
                            UserTransaction userTrx = RetryingTransactionHelper.getActiveUserTransaction();
                            if (userTrx != null) {
                                logger.debug("Transaction status: " + userTrx.getStatus());
                            }
                        }

                        // Mark the transaction rollback-only (unless already
                        // marked) so the failed work is not committed.
                        UserTransaction userTrx = RetryingTransactionHelper.getActiveUserTransaction();
                        if (userTrx != null) {
                            if (userTrx.getStatus() != Status.STATUS_MARKED_ROLLBACK) {
                                if (logger.isDebugEnabled())
                                    logger.debug("Marking web script transaction for rollback");
                                try {
                                    userTrx.setRollbackOnly();
                                } catch (Throwable re) {
                                    // Best-effort: a failure to mark rollback must not
                                    // mask the original exception.
                                    if (logger.isDebugEnabled())
                                        logger.debug(
                                                "Caught and ignoring exception during marking for rollback: "
                                                        + re.getMessage());
                                }
                            }
                        }

                        // re-throw original exception for retry
                        throw e;
                    } finally {
                        if (logger.isDebugEnabled())
                            logger.debug("End retry transaction block: " + description.getRequiredTransaction()
                                    + "," + description.getRequiredTransactionParameters().getCapability());
                    }

                    return null;
                }
            };

            boolean readonly = description.getRequiredTransactionParameters()
                    .getCapability() == TransactionCapability.readonly;
            boolean requiresNew = description.getRequiredTransaction() == RequiredTransaction.requiresnew;

            // log a warning if we detect a GET webscript being run in a readwrite transaction, GET calls should
            // NOT have any side effects so this scenario as a warning sign something maybe amiss, see ALF-10179.
            if (logger.isDebugEnabled() && !readonly && "GET".equalsIgnoreCase(description.getMethod())) {
                logger.debug("Webscript with URL '" + scriptReq.getURL()
                        + "' is a GET request but it's descriptor has declared a readwrite transaction is required");
            }

            try {
                RetryingTransactionHelper transactionHelper = transactionService.getRetryingTransactionHelper();
                if (script instanceof LoginPost) {
                    //login script requires read-write transaction because of authorization intercepter
                    transactionHelper.setForceWritable(true);
                }
                transactionHelper.doInTransaction(work, readonly, requiresNew);
            } catch (TooBusyException e) {
                // Map TooBusyException to a 503 status code
                throw new WebScriptException(HttpServletResponse.SC_SERVICE_UNAVAILABLE, e.getMessage(), e);
            } finally {
                // Get rid of any temporary files
                if (bufferedReq != null) {
                    bufferedReq.close();
                }
            }

            // Ensure a response is always flushed after successful execution
            if (bufferedRes != null) {
                bufferedRes.writeResponse();
            }

        }
    } catch (IOException ioe) {
        // Distinguish a client that hung up (harmless, log only) from a real
        // I/O failure (rethrown to the container).
        Throwable socketException = ExceptionStackUtil.getCause(ioe, SocketException.class);
        Class<?> clientAbortException = null;
        try {
            clientAbortException = Class.forName("org.apache.catalina.connector.ClientAbortException");
        } catch (ClassNotFoundException e) {
            // do nothing -- not running on Tomcat, so the class is absent
        }
        // Note: if you need to look for more exceptions in the stack, then create a static array and pass it in
        if ((socketException != null && socketException.getMessage().contains("Broken pipe"))
                || (clientAbortException != null
                        && ExceptionStackUtil.getCause(ioe, clientAbortException) != null)) {
            if (logger.isDebugEnabled()) {
                logger.warn("Client has cut off communication", ioe);
            } else {
                logger.info("Client has cut off communication");
            }
        } else {
            throw ioe;
        }
    }
}

From source file:org.apereo.portal.portlet.rendering.PortletExecutionManager.java

/**
 * Serves a portlet resource request through an asynchronous worker, waiting up
 * to the configured timeout. On failure a generic 503 is sent if the response
 * has not been committed yet; a still-running worker is handed to the
 * hung-worker handling.
 */
@Override
public void doPortletServeResource(IPortletWindowId portletWindowId, HttpServletRequest request,
        HttpServletResponse response) {
    final long timeout = getPortletResourceTimeout(portletWindowId, request);

    final IPortletExecutionWorker<Long> resourceWorker = this.portletWorkerFactory.createResourceWorker(request,
            response, portletWindowId);
    resourceWorker.submit();

    try {
        resourceWorker.get(timeout);
    } catch (Exception e) {
        // Log the exception but not this thread's stacktrace. The portlet worker has already logged its stack trace
        this.logger.error("resource worker {} failed with exception {}", resourceWorker, e.toString());
        // render generic serveResource error
        try {
            if (!response.isCommitted()) {
                response.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, "resource unavailable");
            }
        } catch (IOException e1) {
            // Fix: log the sendError failure itself (previously the original
            // worker exception 'e' was logged here instead of 'e1').
            logger.error("caught IOException trying to send error response for failed resource worker", e1);
        }
    }

    //If the worker is still running add it to the hung-workers queue
    if (!resourceWorker.isComplete()) {
        cancelWorker(request, resourceWorker);
    }
}

From source file:org.springframework.web.servlet.mvc.support.DefaultHandlerExceptionResolver.java

/**
 * Handle the case where an async request timed out.
 * <p>The default implementation sends an HTTP 503 error.
 * @param ex the {@link AsyncRequestTimeoutException }to be handled
 * @param request current HTTP request/*from   www  . j  av a  2 s .c om*/
 * @param response current HTTP response
 * @param handler the executed handler, or {@code null} if none chosen
 * at the time of the exception (for example, if multipart resolution failed)
 * @return an empty ModelAndView indicating the exception was handled
 * @throws IOException potentially thrown from response.sendError()
 * @since 4.2.8
 */
protected ModelAndView handleAsyncRequestTimeoutException(AsyncRequestTimeoutException ex,
        HttpServletRequest request, HttpServletResponse response, @Nullable Object handler) throws IOException {

    if (!response.isCommitted()) {
        response.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
    } else if (logger.isErrorEnabled()) {
        logger.error("Async timeout for " + request.getMethod() + " [" + request.getRequestURI() + "]");
    }
    return new ModelAndView();
}

From source file:org.signserver.web.GenericProcessServlet.java

/**
 * Common handler for an HTTP process request: authenticates the caller
 * (client certificate or HTTP Basic credentials), builds the request context
 * and metadata, dispatches to the worker session according to
 * {@code processType} (sign document / validate document / validate
 * certificate), writes the worker result to the response and maps worker-side
 * exceptions to HTTP status codes (401, 403, 404, 400, 503, 500).
 *
 * @param req incoming HTTP request
 * @param res HTTP response the result or error is written to
 * @param workerId id of the worker that should process the request
 * @param data the request payload bytes
 * @param fileName client-supplied file name; path components are stripped
 *        before it is stored in the context
 * @param pdfPassword optional PDF password forwarded as request metadata
 * @param processType which operation to perform
 * @param metadataHolder additional client-supplied request metadata
 * @throws java.io.IOException on I/O failures writing the response
 * @throws ServletException on servlet-level failures
 */
private void processRequest(final HttpServletRequest req, final HttpServletResponse res, final int workerId,
        final byte[] data, String fileName, final String pdfPassword, final ProcessType processType,
        final MetaDataHolder metadataHolder) throws java.io.IOException, ServletException {
    final String remoteAddr = req.getRemoteAddr();
    if (LOG.isDebugEnabled()) {
        LOG.debug("Recieved HTTP process request for worker " + workerId + ", from ip " + remoteAddr);
    }

    // Client certificate: the container exposes the TLS client chain via this
    // request attribute; the first entry is the client's own certificate.
    Certificate clientCertificate = null;
    Certificate[] certificates = (X509Certificate[]) req.getAttribute("javax.servlet.request.X509Certificate");
    if (certificates != null) {
        clientCertificate = certificates[0];
    }

    // Create request context and meta data
    final RequestContext context = new RequestContext(clientCertificate, remoteAddr);
    RequestMetadata metadata = RequestMetadata.getInstance(context);

    IClientCredential credential;

    // Credential resolution order: client certificate first, then HTTP Basic,
    // otherwise anonymous (null credential).
    if (clientCertificate instanceof X509Certificate) {
        final X509Certificate cert = (X509Certificate) clientCertificate;
        LOG.debug("Authentication: certificate");
        credential = new CertificateClientCredential(cert.getSerialNumber().toString(16),
                cert.getIssuerDN().getName());
    } else {
        // Check is client supplied basic-credentials
        final String authorization = req.getHeader(HTTP_AUTH_BASIC_AUTHORIZATION);
        if (authorization != null) {
            LOG.debug("Authentication: password");

            // Header shape: "Basic <base64(user:password)>"; split on ':' at
            // most once so passwords containing ':' survive.
            final String decoded[] = new String(Base64.decode(authorization.split("\\s")[1])).split(":", 2);

            credential = new UsernamePasswordClientCredential(decoded[0], decoded[1]);
        } else {
            LOG.debug("Authentication: none");
            credential = null;
        }
    }
    context.put(RequestContext.CLIENT_CREDENTIAL, credential);

    // Create log map
    LogMap logMap = LogMap.getInstance(context);

    final String xForwardedFor = req.getHeader(RequestContext.X_FORWARDED_FOR);

    // Add HTTP specific log entries
    logMap.put(IWorkerLogger.LOG_REQUEST_FULLURL,
            req.getRequestURL().append("?").append(req.getQueryString()).toString());
    logMap.put(IWorkerLogger.LOG_REQUEST_LENGTH, String.valueOf(data.length));
    logMap.put(IWorkerLogger.LOG_FILENAME, fileName);
    logMap.put(IWorkerLogger.LOG_XFORWARDEDFOR, xForwardedFor);
    logMap.put(IWorkerLogger.LOG_WORKER_NAME,
            getWorkerSession().getCurrentWorkerConfig(workerId).getProperty(PropertiesConstants.NAME));

    if (xForwardedFor != null) {
        context.put(RequestContext.X_FORWARDED_FOR, xForwardedFor);
    }

    // Store filename for use by archiver etc
    if (fileName != null) {
        fileName = stripPath(fileName);
    }
    context.put(RequestContext.FILENAME, fileName);
    context.put(RequestContext.RESPONSE_FILENAME, fileName);

    // PDF Password
    if (pdfPassword != null) {
        metadata.put(RequestContext.METADATA_PDFPASSWORD, pdfPassword);
    }

    addRequestMetaData(metadataHolder, metadata);

    if (LOG.isDebugEnabled()) {
        LOG.debug("Received bytes of length: " + data.length);
    }

    // Random id used to correlate the worker response with this request.
    final int requestId = random.nextInt();

    try {
        String responseText;

        switch (processType) {
        case signDocument:
            final GenericServletResponse servletResponse = (GenericServletResponse) getWorkerSession().process(
                    new AdminInfo("Client user", null, null), workerId,
                    new GenericServletRequest(requestId, data, req), context);

            if (servletResponse.getRequestID() != requestId) { // TODO: Is this possible to get at all?
                LOG.error("Response ID " + servletResponse.getRequestID() + " not matching request ID "
                        + requestId);
                res.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                        "Request and response ID missmatch");
                return;
            }

            byte[] processedBytes = (byte[]) servletResponse.getProcessedData();

            // Stream the signed document back as a download.
            res.setContentType(servletResponse.getContentType());
            Object responseFileName = context.get(RequestContext.RESPONSE_FILENAME);
            if (responseFileName instanceof String) {
                res.setHeader("Content-Disposition", "attachment; filename=\"" + responseFileName + "\"");
            }
            res.setContentLength(processedBytes.length);
            res.getOutputStream().write(processedBytes);
            break;
        case validateDocument:
            final GenericValidationResponse validationResponse = (GenericValidationResponse) getWorkerSession()
                    .process(new AdminInfo("Client user", null, null), workerId,
                            new GenericValidationRequest(requestId, data), context);

            // Plain-text verdict for document validation.
            responseText = validationResponse.isValid() ? "VALID" : "INVALID";

            if (LOG.isDebugEnabled()) {
                final Validation validation = validationResponse.getCertificateValidation();

                if (validation != null) {
                    LOG.debug("Cert validation status: "
                            + validationResponse.getCertificateValidation().getStatusMessage());
                }
            }

            res.setContentType("text/plain");
            res.setContentLength(responseText.getBytes().length);
            res.getOutputStream().write(responseText.getBytes());
            break;
        case validateCertificate:
            final Certificate cert;
            try {
                cert = CertTools.getCertfromByteArray(data);

                final String certPurposes = req.getParameter(CERT_PURPOSES_PROPERTY_NAME);
                final ValidateResponse certValidationResponse = (ValidateResponse) getWorkerSession().process(
                        new AdminInfo("Client user", null, null), workerId,
                        new ValidateRequest(cert, certPurposes), context);
                final Validation validation = certValidationResponse.getValidation();

                // Semicolon-separated plain-text response:
                // status;validPurposes;statusMessage;revocationReason;revocationTime
                final StringBuilder sb = new StringBuilder(validation.getStatus().name());

                sb.append(";");

                final String validPurposes = certValidationResponse.getValidCertificatePurposes();

                if (validPurposes != null) {
                    sb.append(certValidationResponse.getValidCertificatePurposes());
                }
                sb.append(";");
                sb.append(certValidationResponse.getValidation().getStatusMessage());
                sb.append(";");
                sb.append(certValidationResponse.getValidation().getRevokationReason());
                sb.append(";");

                final Date revocationDate = certValidationResponse.getValidation().getRevokedDate();

                if (revocationDate != null) {
                    sb.append(certValidationResponse.getValidation().getRevokedDate().getTime());
                }

                responseText = sb.toString();

                res.setContentType("text/plain");
                res.setContentLength(responseText.getBytes().length);
                res.getOutputStream().write(responseText.getBytes());
            } catch (CertificateException e) {
                LOG.error("Invalid certificate: " + e.getMessage());
                sendBadRequest(res, "Invalid certificate: " + e.getMessage());
                return;
            }
            break;
        }
        ;

        res.getOutputStream().close();

    } catch (AuthorizationRequiredException e) {
        // Ask the client to authenticate against this worker's Basic realm.
        LOG.debug("Sending back HTTP 401: " + e.getLocalizedMessage());

        final String httpAuthBasicRealm = "SignServer Worker " + workerId;

        res.setHeader(HTTP_AUTH_BASIC_WWW_AUTHENTICATE, "Basic realm=\"" + httpAuthBasicRealm + "\"");
        res.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Authorization Required");
    } catch (AccessDeniedException e) {
        LOG.debug("Sending back HTTP 403: " + e.getLocalizedMessage());
        res.sendError(HttpServletResponse.SC_FORBIDDEN, "Access Denied");
    } catch (NoSuchWorkerException ex) {
        res.sendError(HttpServletResponse.SC_NOT_FOUND, "Worker Not Found");
    } catch (IllegalRequestException e) {
        res.sendError(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
    } catch (CryptoTokenOfflineException e) {
        // Worker exists but its crypto token is offline: temporary condition.
        res.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, e.getMessage());
    } catch (ServiceUnavailableException e) {
        res.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, e.getMessage());
    } catch (NotGrantedException e) {
        res.sendError(HttpServletResponse.SC_FORBIDDEN, e.getMessage());
    } catch (SignServerException e) {
        res.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage());
    }
}

From source file:com.alfaariss.oa.sso.web.profile.logout.LogoutProfile.java

/**
 * Answers a logout state poll with a plain-text status code: 400 when there is
 * no session, 200 when the logout has reached a terminal state (success,
 * partial or failed) and 503 while the logout is still in progress.
 *
 * @param servletResponse the response the status is written to
 * @param session the logout session being polled; may be {@code null}
 * @throws OAException on any internal failure
 */
private void processLogoutState(HttpServletResponse servletResponse, ISession session) throws OAException {
    _logger.debug("Logout - Perform State Request");
    try {
        servletResponse.setContentType("text/plain");

        if (session == null) {
            // Poll does not belong to a known logout session.
            _logger.debug("no session");
            servletResponse.sendError(HttpServletResponse.SC_BAD_REQUEST);
            return;
        }

        switch (session.getState()) {
        case USER_LOGOUT_FAILED:
        case USER_LOGOUT_PARTIAL:
        case USER_LOGOUT_SUCCESS:
            // All terminal logout states are reported as 200.
            _logger.debug("finished");
            servletResponse.sendError(HttpServletResponse.SC_OK);
            break;
        default:
            // Logout has not completed yet.
            _logger.debug("in progress");
            servletResponse.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
            break;
        }
    } catch (Exception e) {
        _logger.fatal("Internal error during logout state resolving", e);
        throw new OAException(SystemErrors.ERROR_INTERNAL);
    }
}

From source file:org.jasig.portal.portlet.rendering.PortletExecutionManager.java

/**
 * Serves a portlet resource request through an asynchronous worker, waiting up
 * to the configured timeout. On failure a generic 503 is sent if the response
 * has not been committed yet; a still-running worker is handed to the
 * hung-worker handling.
 */
@Override
public void doPortletServeResource(IPortletWindowId portletWindowId, HttpServletRequest request,
        HttpServletResponse response) {
    final long timeout = getPortletResourceTimeout(portletWindowId, request);

    final IPortletExecutionWorker<Long> resourceWorker = this.portletWorkerFactory.createResourceWorker(request,
            response, portletWindowId);
    resourceWorker.submit();

    try {
        resourceWorker.get(timeout);
    } catch (Exception e) {
        //log the exception
        this.logger.error("resource worker failed with exception", e);
        // render generic serveResource error
        try {
            if (!response.isCommitted()) {
                response.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, "resource unavailable");
            }
        } catch (IOException e1) {
            // Fix: log the sendError failure itself (previously the original
            // worker exception 'e' was logged here instead of 'e1').
            logger.error("caught IOException trying to send error response for failed resource worker", e1);
        }
    }

    //If the worker is still running add it to the hung-workers queue
    if (!resourceWorker.isComplete()) {
        cancelWorker(request, resourceWorker);
    }
}

From source file:byps.http.HHttpServlet.java

/**
 * Handles a negotiate request: obtains/creates the session and processes the
 * negotiate message asynchronously, writing the negotiated result back to the
 * client as JSON. If the servlet is not initialized yet, 503 is returned so
 * the client can retry.
 *
 * @param request incoming HTTP request
 * @param response HTTP response
 * @param ibuf buffer containing the negotiate message
 * @throws ServletException on servlet-level failures
 * @throws BException on byps-level failures
 */
protected void doNegotiate(final HttpServletRequest request, final HttpServletResponse response,
        final ByteBuffer ibuf) throws ServletException, BException {
    if (log.isDebugEnabled())
        log.debug("doNegotiate(");

    // Initialization finished?
    if (!isInitialized.get()) {
        // Servlet not ready yet: answer 503 so the client can retry later.
        int httpStatus = HttpServletResponse.SC_SERVICE_UNAVAILABLE;
        if (log.isInfoEnabled())
            log.info("HHttpServlet not initialized. Return " + httpStatus);
        response.setStatus(httpStatus);
        return;
    }

    final HSession sess = doCreateSession(request);

    // Process Negotiate message

    final HRequestContext rctxt = createRequestContext(request, response, HConstants.PROCESS_MESSAGE_ASYNC);

    // Callback invoked when negotiation completes (successfully or not);
    // writes the result and completes the async request context.
    final BAsyncResult<ByteBuffer> asyncResponse = new BAsyncResult<ByteBuffer>() {

        @Override
        public void setAsyncResult(ByteBuffer obuf, Throwable e) {
            if (log.isDebugEnabled())
                log.debug("setAsyncResult(");
            final HttpServletResponse resp = (HttpServletResponse) rctxt.getResponse();
            try {
                if (e != null) {
                    // Negotiation failed: report 500 with the stack trace as body.
                    resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                    e.printStackTrace(resp.getWriter());
                } else {
                    resp.setContentType("application/json");
                    OutputStream os = resp.getOutputStream();
                    BWire.bufferToStream(obuf, false, os);
                }

            } catch (Throwable ex) {
                // NOTE(review): this logs the original error 'e', not the write
                // failure 'ex' -- confirm whether that is intentional.
                if (log.isInfoEnabled())
                    log.info("Failed to write negotiate result", e);
                try {
                    resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                    e.printStackTrace(resp.getWriter());
                } catch (IOException ignored) {
                }
            } finally {
                // Always complete the async context, even after write failures.
                rctxt.complete();
            }
            if (log.isDebugEnabled())
                log.debug(")setAsyncResult");
        }

    };

    // The actual negotiation, executed on the async request context.
    final Runnable run = new Runnable() {
        public void run() {
            if (log.isDebugEnabled())
                log.debug("run(");
            try {
                BServer server = sess.getServer();

                BProtocol protocol = server.negotiate(server.getTransport().getTargetId(), ibuf, asyncResponse);

                // Notify the reverse client of the negotiated protocol.
                // The server and the reverse client must use the same protocol;
                // otherwise they would need separate sessions.

                BClient clientR = sess.getClientR();
                if (clientR != null) {
                    if (log.isDebugEnabled())
                        log.debug("clientR=" + clientR + ", set negotiated protocol=" + protocol);
                    clientR.getTransport().setProtocol(protocol);
                }

            } catch (Throwable e) {
                // Route any failure through the async callback so the client
                // still gets an error response.
                asyncResponse.setAsyncResult(null, e);
            }
            if (log.isDebugEnabled())
                log.debug(")run");
        }
    };

    rctxt.start(run);
    if (log.isDebugEnabled())
        log.debug(")doNegotiate");
}

From source file:org.jitsi.videobridge.rest.HandlerImpl.java

/**
 * Dispatches an HTTP request for a COLIBRI-related resource (e.g.
 * <tt>Conference</tt>, <tt>Content</tt>, and <tt>Channel</tt>) represented
 * in JSON format. Unknown targets and unsupported targets produce no
 * response modification; unsupported HTTP methods yield 405, and a
 * shutdown request while shutdown is disabled yields 503.
 *
 * @param target the target of the request
 * @param baseRequest the original unwrapped {@link Request} object
 * @param request the request either as the {@code Request} object or a
 * wrapper of that request
 * @param response the response either as the {@code Response} object or a
 * wrapper of that response
 * @throws IOException
 * @throws ServletException
 */
private void handleColibriJSON(String target, Request baseRequest, HttpServletRequest request,
        HttpServletResponse response) throws IOException, ServletException {
    if (target == null) {
        // TODO Auto-generated method stub
        return;
    }

    String method = request.getMethod();

    if (target.startsWith(CONFERENCES)) {
        // Reduce the target to the (possibly empty) conference ID that
        // follows the CONFERENCES prefix and an optional "/".
        String conferenceId = target.substring(CONFERENCES.length());
        if (conferenceId.startsWith("/"))
            conferenceId = conferenceId.substring(1);

        if (conferenceId.isEmpty()) {
            // Collection-level operations on the conference list.
            if (GET_HTTP_METHOD.equals(method)) {
                // List the Conferences of Videobridge.
                doGetConferencesJSON(baseRequest, request, response);
            } else if (POST_HTTP_METHOD.equals(method)) {
                // Create a new Conference in Videobridge.
                doPostConferencesJSON(baseRequest, request, response);
            } else {
                response.setStatus(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
            }
            return;
        }

        // Operations on one specific Conference, addressed by its ID.
        if (GET_HTTP_METHOD.equals(method)) {
            // Retrieve a representation of a Conference of Videobridge.
            doGetConferenceJSON(conferenceId, baseRequest, request, response);
        } else if (PATCH_HTTP_METHOD.equals(method)) {
            // Modify a Conference of Videobridge.
            doPatchConferenceJSON(conferenceId, baseRequest, request, response);
        } else {
            response.setStatus(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
        }
    } else if (target.equals(STATISTICS)) {
        if (GET_HTTP_METHOD.equals(method)) {
            // Get the VideobridgeStatistics of Videobridge.
            doGetStatisticsJSON(baseRequest, request, response);
        } else {
            response.setStatus(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
        }
    } else if (target.equals(SHUTDOWN)) {
        // Graceful shutdown must be explicitly enabled via configuration.
        if (!shutdownEnabled) {
            response.setStatus(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
            return;
        }

        if (POST_HTTP_METHOD.equals(method)) {
            // Initiate a graceful shutdown of Videobridge.
            doPostShutdownJSON(baseRequest, request, response);
        } else {
            response.setStatus(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
        }
    }
}

From source file:org.epics.archiverappliance.retrieval.DataRetrievalServlet.java

/**
 * Handles a data-retrieval request for multiple PVs (the {@code pv} request
 * parameter may be repeated). Parses time-range and formatting options,
 * resolves a PVTypeInfo and an appliance for each PV, obtains the event
 * streams (one retrieval executor per PV, futures per time span) and writes
 * the merged/deduplicated data to the response in the requested MIME format.
 *
 * @param req  the servlet request; expects at least one {@code pv} parameter
 *             and a path info whose extension selects the MIME type
 *             (json/raw/jplot/qw)
 * @param resp the servlet response the data is streamed into (possibly with
 *             HTTP chunked encoding)
 * @throws ServletException
 * @throws IOException
 */
private void doGetMultiPV(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {

    PoorMansProfiler pmansProfiler = new PoorMansProfiler();

    // Gets the list of PVs specified by the `pv` parameter
    // String arrays might be inefficient for retrieval. In any case, they are sorted, which is essential later on.
    // NOTE(review): Arrays.asList returns a fixed-size list backed by the
    // parameter array; set(i, ...) is used on it below (allowed), add is not.
    List<String> pvNames = Arrays.asList(req.getParameterValues("pv"));

    // Ensuring that the AA has finished starting up before requests are accepted.
    if (configService.getStartupState() != STARTUP_SEQUENCE.STARTUP_COMPLETE) {
        String msg = "Cannot process data retrieval requests for specified PVs ("
                + StringUtils.join(pvNames, ", ") + ") until the appliance has completely started up.";
        logger.error(msg);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, msg);
        return;
    }

    // Getting various fields from arguments
    String startTimeStr = req.getParameter("from");
    String endTimeStr = req.getParameter("to");
    boolean useReduced = false;
    String useReducedStr = req.getParameter("usereduced");
    if (useReducedStr != null && !useReducedStr.equals("")) {
        try {
            useReduced = Boolean.parseBoolean(useReducedStr);
        } catch (Exception ex) {
            logger.error("Exception parsing usereduced", ex);
            useReduced = false;
        }
    }

    // Getting MIME type
    // NOTE(review): assumes the path info contains a '.'; a request without an
    // extension would throw ArrayIndexOutOfBoundsException here — TODO confirm
    // callers always route extension-suffixed paths to this servlet.
    String extension = req.getPathInfo().split("\\.")[1];
    logger.info("Mime is " + extension);

    if (!extension.equals("json") && !extension.equals("raw") && !extension.equals("jplot")
            && !extension.equals("qw")) {
        String msg = "Mime type " + extension + " is not supported. Please use \"json\", \"jplot\" or \"raw\".";
        resp.setHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
        return;
    }

    // Chunked transfer encoding is the default; `donotchunk` disables it.
    boolean useChunkedEncoding = true;
    String doNotChunkStr = req.getParameter("donotchunk");
    if (doNotChunkStr != null && !doNotChunkStr.equals("false")) {
        logger.info("Turning off HTTP chunked encoding");
        useChunkedEncoding = false;
    }

    boolean fetchLatestMetadata = false;
    String fetchLatestMetadataStr = req.getParameter("fetchLatestMetadata");
    if (fetchLatestMetadataStr != null && fetchLatestMetadataStr.equals("true")) {
        logger.info("Adding a call to the engine to fetch the latest metadata");
        fetchLatestMetadata = true;
    }

    // For data retrieval we need a PV info. However, in case of PV's that have long since retired, we may not want to have PVTypeInfo's in the system.
    // So, we support a template PV that lays out the data sources.
    // During retrieval, you can pass in the PV as a template and we'll clone this and make a temporary copy.
    String retiredPVTemplate = req.getParameter("retiredPVTemplate");

    // Goes through given PVs and returns bad request error.
    // NOTE(review): the `nullPVs > 0` check sits inside the loop, so the
    // request is rejected as soon as the first null PV is seen; the counter
    // never exceeds 1. Functionally fine, but the check could live after
    // the loop.
    int nullPVs = 0;
    for (String pvName : pvNames) {
        if (pvName == null) {
            nullPVs++;
        }
        if (nullPVs > 0) {
            logger.warn("Some PVs are null in the request.");
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST);
            return;
        }
    }

    // A request containing the special ping PV is only a connectivity probe.
    if (pvNames.toString().matches("^.*" + ARCH_APPL_PING_PV + ".*$")) {
        logger.debug("Processing ping PV - this is used to validate the connection with the client.");
        processPingPV(req, resp);
        return;
    }

    // NOTE(review): this loop assigns the trimmed name only to the local loop
    // variable `pvName`; the `pvNames` list is NOT updated, so the ".VAL"
    // suffix removal has no effect downstream — looks like a bug, compare the
    // indexed set(i, ...) pattern used for post-processor parsing below.
    for (String pvName : pvNames)
        if (pvName.endsWith(".VAL")) {
            int len = pvName.length();
            pvName = pvName.substring(0, len - 4);
            logger.info("Removing .VAL from pvName for request giving " + pvName);
        }

    // ISO datetimes are of the form "2011-02-02T08:00:00.000Z"
    // Default end time is one hour in the future.
    Timestamp end = TimeUtils.plusHours(TimeUtils.now(), 1);
    if (endTimeStr != null) {
        try {
            end = TimeUtils.convertFromISO8601String(endTimeStr);
        } catch (IllegalArgumentException ex) {
            try {
                // Fall back to a date-time string with an explicit offset.
                end = TimeUtils.convertFromDateTimeStringWithOffset(endTimeStr);
            } catch (IllegalArgumentException ex2) {
                String msg = "Cannot parse time " + endTimeStr;
                logger.warn(msg, ex2);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
        }
    }

    // We get one day by default
    Timestamp start = TimeUtils.minusDays(end, 1);
    if (startTimeStr != null) {
        try {
            start = TimeUtils.convertFromISO8601String(startTimeStr);
        } catch (IllegalArgumentException ex) {
            try {
                start = TimeUtils.convertFromDateTimeStringWithOffset(startTimeStr);
            } catch (IllegalArgumentException ex2) {
                String msg = "Cannot parse time " + startTimeStr;
                logger.warn(msg, ex2);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
        }
    }

    if (end.before(start)) {
        String msg = "For request, end " + end.toString() + " is before start " + start.toString() + " for pvs "
                + StringUtils.join(pvNames, ", ");
        logger.error(msg);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
        return;
    }

    LinkedList<TimeSpan> requestTimes = new LinkedList<TimeSpan>();

    // We can specify a list of time stamp pairs using the optional timeranges parameter
    String timeRangesStr = req.getParameter("timeranges");
    if (timeRangesStr != null) {
        boolean continueWithRequest = parseTimeRanges(resp, "[" + StringUtils.join(pvNames, ", ") + "]",
                requestTimes, timeRangesStr);
        if (!continueWithRequest) {
            // Cannot parse the time ranges properly; we so abort the request.
            String msg = "The specified time ranges could not be processed appropriately. Aborting.";
            logger.info(msg);
            resp.setHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
            return;
        }

        // Override the start and the end so that the mergededup consumer works correctly.
        start = requestTimes.getFirst().getStartTime();
        end = requestTimes.getLast().getEndTime();

    } else {
        requestTimes.add(new TimeSpan(start, end));
    }

    assert (requestTimes.size() > 0);

    // Get a post processor for each PV specified in pvNames
    // If PV in the form <pp>(<pv>), process it
    String postProcessorUserArg = req.getParameter("pp");
    List<String> postProcessorUserArgs = new ArrayList<>(pvNames.size());
    List<PostProcessor> postProcessors = new ArrayList<>(pvNames.size());
    for (int i = 0; i < pvNames.size(); i++) {
        postProcessorUserArgs.add(postProcessorUserArg);

        if (pvNames.get(i).contains("(")) {
            if (!pvNames.get(i).contains(")")) {
                String msg = "Unbalanced paren " + pvNames.get(i);
                logger.error(msg);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
            // "pp(pv, arg1, arg2)" splits into [pp, pv, arg1, arg2];
            // extra args are folded into the pp arg joined with '_'.
            String[] components = pvNames.get(i).split("[(,)]");
            postProcessorUserArg = components[0];
            postProcessorUserArgs.set(i, postProcessorUserArg);
            pvNames.set(i, components[1]);
            if (components.length > 2) {
                for (int j = 2; j < components.length; j++) {
                    postProcessorUserArgs.set(i, postProcessorUserArgs.get(i) + "_" + components[j]);
                }
            }
            logger.info("After parsing the function call syntax pvName is " + pvNames.get(i)
                    + " and postProcessorUserArg is " + postProcessorUserArg);
        }
        postProcessors.add(PostProcessors.findPostProcessor(postProcessorUserArg));
    }

    // Resolve a PVTypeInfo for every PV (null when the PV is unknown here).
    List<PVTypeInfo> typeInfos = new ArrayList<PVTypeInfo>(pvNames.size());
    for (int i = 0; i < pvNames.size(); i++) {
        typeInfos.add(PVNames.determineAppropriatePVTypeInfo(pvNames.get(i), configService));
    }
    pmansProfiler.mark("After PVTypeInfo");

    for (int i = 0; i < pvNames.size(); i++)
        if (typeInfos.get(i) == null && RetrievalState.includeExternalServers(req)) {
            logger.debug(
                    "Checking to see if pv " + pvNames.get(i) + " is served by a external Archiver Server");
            typeInfos.set(i,
                    checkIfPVisServedByExternalServer(pvNames.get(i), start, req, resp, useChunkedEncoding));
        }

    for (int i = 0; i < pvNames.size(); i++) {
        if (typeInfos.get(i) == null) {
            // TODO Only needed if we're forwarding the request to another server.
            if (resp.isCommitted()) {
                logger.debug("Proxied the data thru an external server for PV " + pvNames.get(i));
                return;
            }

            // Unknown PV: fall back to the retired-PV template, if given.
            if (retiredPVTemplate != null) {
                PVTypeInfo templateTypeInfo = PVNames.determineAppropriatePVTypeInfo(retiredPVTemplate,
                        configService);
                if (templateTypeInfo != null) {
                    typeInfos.set(i, new PVTypeInfo(pvNames.get(i), templateTypeInfo));
                    typeInfos.get(i).setPaused(true);
                    typeInfos.get(i).setApplianceIdentity(configService.getMyApplianceInfo().getIdentity());
                    // Somehow tell the code downstream that this is a fake typeInfos.
                    typeInfos.get(i).setSamplingMethod(SamplingMethod.DONT_ARCHIVE);
                    logger.debug("Using a template PV for " + pvNames.get(i)
                            + " Need to determine the actual DBR type.");
                    setActualDBRTypeFromData(pvNames.get(i), typeInfos.get(i), configService);
                }
            }
        }

        if (typeInfos.get(i) == null) {
            String msg = "Unable to find typeinfo for pv " + pvNames.get(i);
            logger.error(msg);
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_NOT_FOUND, msg);
            return;
        }

        // No explicit post processor: with usereduced, use the configured
        // default reducing post processor (FirstSamplePP by default).
        if (postProcessors.get(i) == null) {
            if (useReduced) {
                String defaultPPClassName = configService.getInstallationProperties().getProperty(
                        "org.epics.archiverappliance.retrieval.DefaultUseReducedPostProcessor",
                        FirstSamplePP.class.getName());
                logger.debug("Using the default usereduced preprocessor " + defaultPPClassName);
                try {
                    postProcessors.set(i, (PostProcessor) Class.forName(defaultPPClassName).newInstance());
                } catch (Exception ex) {
                    logger.error("Exception constructing new instance of post processor " + defaultPPClassName,
                            ex);
                    postProcessors.set(i, null);
                }
            }
        }

        if (postProcessors.get(i) == null) {
            logger.debug("Using the default raw preprocessor");
            postProcessors.set(i, new DefaultRawPostProcessor());
        }
    }

    // Get the appliances for each of the PVs
    List<ApplianceInfo> applianceForPVs = new ArrayList<ApplianceInfo>(pvNames.size());
    for (int i = 0; i < pvNames.size(); i++) {
        applianceForPVs.add(configService.getApplianceForPV(pvNames.get(i)));
        if (applianceForPVs.get(i) == null) {
            // TypeInfo cannot be null here...
            assert (typeInfos.get(i) != null);
            applianceForPVs.set(i, configService.getAppliance(typeInfos.get(i).getApplianceIdentity()));
        }
    }

    /*
     * Retrieving the external appliances if the current appliance has not got the PV assigned to it, and
     * storing the associated information of the PVs in that appliance.
     */
    Map<String, ArrayList<PVInfoForClusterRetrieval>> applianceToPVs = new HashMap<String, ArrayList<PVInfoForClusterRetrieval>>();
    for (int i = 0; i < pvNames.size(); i++) {
        if (!applianceForPVs.get(i).equals(configService.getMyApplianceInfo())) {

            // NOTE(review): the lookup key here is getMgmtURL() but the entry
            // is stored below under getRetrievalURL() — existing per-appliance
            // lists will never be found, so each PV gets a fresh
            // single-element list. Looks like a key-mismatch bug; confirm
            // which URL is the intended map key.
            ArrayList<PVInfoForClusterRetrieval> appliancePVs = applianceToPVs
                    .get(applianceForPVs.get(i).getMgmtURL());
            appliancePVs = (appliancePVs == null) ? new ArrayList<>() : appliancePVs;
            PVInfoForClusterRetrieval pvInfoForRetrieval = new PVInfoForClusterRetrieval(pvNames.get(i),
                    typeInfos.get(i), postProcessors.get(i), applianceForPVs.get(i));
            appliancePVs.add(pvInfoForRetrieval);
            applianceToPVs.put(applianceForPVs.get(i).getRetrievalURL(), appliancePVs);
        }
    }

    List<List<Future<EventStream>>> listOfEventStreamFuturesLists = new ArrayList<List<Future<EventStream>>>();
    Set<String> retrievalURLs = applianceToPVs.keySet();
    if (retrievalURLs.size() > 0) {
        // Get list of PVs and redirect them to appropriate appliance to be retrieved.
        String retrievalURL;
        ArrayList<PVInfoForClusterRetrieval> pvInfos;
        // NOTE(review): the loop condition `!((... ) != null)` only runs the
        // body while next() returns null — but keySet().next() of a non-empty
        // HashMap never returns null, so the body never executes and
        // cluster-wide retrieval is effectively skipped. Also, a fresh
        // iterator is created every evaluation, so this could never advance.
        // Likely intended: iterate over retrievalURLs with a for-each loop.
        while (!((retrievalURL = retrievalURLs.iterator().next()) != null)) {
            // Get array list of PVs for appliance
            pvInfos = applianceToPVs.get(retrievalURL);
            try {
                List<List<Future<EventStream>>> resultFromForeignAppliances = retrieveEventStreamFromForeignAppliance(
                        req, resp, pvInfos, requestTimes, useChunkedEncoding,
                        retrievalURL + "/../data/getDataForPVs.raw", start, end);
                listOfEventStreamFuturesLists.addAll(resultFromForeignAppliances);
            } catch (Exception ex) {
                logger.error("Failed to retrieve " + StringUtils.join(pvNames, ", ") + " from " + retrievalURL
                        + ".");
                return;
            }
        }
    }

    pmansProfiler.mark("After Appliance Info");

    // Setting post processor for PVs, taking into account whether there is a field in the PV name
    List<String> pvNamesFromRequests = new ArrayList<String>(pvNames.size());
    for (int i = 0; i < pvNames.size(); i++) {
        String pvName = pvNames.get(i);
        pvNamesFromRequests.add(pvName);
        PVTypeInfo typeInfo = typeInfos.get(i);
        postProcessorUserArg = postProcessorUserArgs.get(i);

        // If a field is specified in a PV name, it will create a post processor for that
        String fieldName = PVNames.getFieldName(pvName);
        if (fieldName != null && !fieldName.equals("") && !pvName.equals(typeInfo.getPvName())) {
            logger.debug("We reset the pvName " + pvName + " to one from the typeinfo " + typeInfo.getPvName()
                    + " as that determines the name of the stream. " + "Also using ExtraFieldsPostProcessor.");
            pvNames.set(i, typeInfo.getPvName());
            postProcessors.set(i, new ExtraFieldsPostProcessor(fieldName));
        }

        try {
            // Postprocessors get their mandatory arguments from the request.
            // If user does not pass in the expected request, throw an exception.
            postProcessors.get(i).initialize(postProcessorUserArg, pvName);
        } catch (Exception ex) {
            String msg = "Postprocessor threw an exception during initialization for " + pvName;
            logger.error(msg, ex);
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_NOT_FOUND, msg);
            return;
        }
    }

    /*
     * MergeDedupConsumer is what writes PB data in its respective format to the HTML response.
     * The response, after the MergeDedupConsumer is created, contains the following:
     * 
     * 1) The content type for the response.
     * 2) Any additional headers for the particular MIME response.
     * 
     * Additionally, the MergeDedupConsumer instance holds a reference to the output stream
     * that is used to write to the HTML response. It is stored under the name `os`.
     */
    MergeDedupConsumer mergeDedupCountingConsumer;
    try {
        mergeDedupCountingConsumer = createMergeDedupConsumer(resp, extension, useChunkedEncoding);
    } catch (ServletException se) {
        String msg = "Exception when retrieving data " + "-->" + se.toString();
        logger.error(msg, se);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, msg);
        return;
    }

    /* 
     * BasicContext contains the PV name and the expected return type. Used to access PB files.
     * RetrievalExecutorResult contains a thread service class and the time spans Presumably, the 
     * thread service is what retrieves the data, and the BasicContext is the context in which it 
     * works.
     */
    List<HashMap<String, String>> engineMetadatas = new ArrayList<HashMap<String, String>>();
    try {
        List<BasicContext> retrievalContexts = new ArrayList<BasicContext>(pvNames.size());
        List<RetrievalExecutorResult> executorResults = new ArrayList<RetrievalExecutorResult>(pvNames.size());
        for (int i = 0; i < pvNames.size(); i++) {
            if (fetchLatestMetadata) {
                // Make a call to the engine to fetch the latest metadata.
                engineMetadatas.add(fetchLatestMedataFromEngine(pvNames.get(i), applianceForPVs.get(i)));
            }
            retrievalContexts.add(new BasicContext(typeInfos.get(i).getDBRType(), pvNamesFromRequests.get(i)));
            executorResults.add(determineExecutorForPostProcessing(pvNames.get(i), typeInfos.get(i),
                    requestTimes, req, postProcessors.get(i)));
        }

        /*
         * There are as many Future objects in the eventStreamFutures List as there are periods over 
         * which to fetch data. Retrieval of data happen here in parallel.
         */
        List<LinkedList<Future<RetrievalResult>>> listOfRetrievalResultFuturesLists = new ArrayList<LinkedList<Future<RetrievalResult>>>();
        for (int i = 0; i < pvNames.size(); i++) {
            listOfRetrievalResultFuturesLists.add(resolveAllDataSources(pvNames.get(i), typeInfos.get(i),
                    postProcessors.get(i), applianceForPVs.get(i), retrievalContexts.get(i),
                    executorResults.get(i), req, resp));
        }
        pmansProfiler.mark("After data source resolution");

        for (int i = 0; i < pvNames.size(); i++) {
            // Data is retrieved here
            List<Future<EventStream>> eventStreamFutures = getEventStreamFuturesFromRetrievalResults(
                    executorResults.get(i), listOfRetrievalResultFuturesLists.get(i));
            listOfEventStreamFuturesLists.add(eventStreamFutures);
        }

    } catch (Exception ex) {
        // NOTE(review): `ex != null` is always true inside a catch block;
        // the real guards are the toString() null-check and the substring test.
        if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
            // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
            logger.debug("Exception when retrieving data ", ex);
        } else {
            logger.error("Exception when retrieving data " + "-->" + ex.toString(), ex);
        }
    }

    long s1 = System.currentTimeMillis();
    String currentlyProcessingPV = null;

    /*
     * The following try bracket goes through each of the streams in the list of event stream futures.
     * 
     * It is intended that the process goes through one PV at a time.
     */
    try {
        for (int i = 0; i < pvNames.size(); i++) {
            List<Future<EventStream>> eventStreamFutures = listOfEventStreamFuturesLists.get(i);
            String pvName = pvNames.get(i);
            PVTypeInfo typeInfo = typeInfos.get(i);
            HashMap<String, String> engineMetadata = fetchLatestMetadata ? engineMetadatas.get(i) : null;
            PostProcessor postProcessor = postProcessors.get(i);

            logger.debug("Done with the RetrievalResults; moving onto the individual event stream "
                    + "from each source for " + StringUtils.join(pvNames, ", "));
            pmansProfiler.mark("After retrieval results");
            for (Future<EventStream> future : eventStreamFutures) {
                EventStreamDesc sourceDesc = null;

                // Gets the result of a data retrieval
                try (EventStream eventStream = future.get()) {
                    sourceDesc = null; // Reset it for each loop iteration.
                    sourceDesc = eventStream.getDescription();
                    if (sourceDesc == null) {
                        logger.warn("Skipping event stream without a desc for pv " + pvName);
                        continue;
                    }

                    logger.debug("Processing event stream for pv " + pvName + " from source "
                            + ((eventStream.getDescription() != null) ? eventStream.getDescription().getSource()
                                    : " unknown"));

                    try {
                        mergeTypeInfo(typeInfo, sourceDesc, engineMetadata);
                    } catch (MismatchedDBRTypeException mex) {
                        // Skip streams whose DBR type disagrees with the typeinfo.
                        logger.error(mex.getMessage(), mex);
                        continue;
                    }

                    if (currentlyProcessingPV == null || !currentlyProcessingPV.equals(pvName)) {
                        logger.debug("Switching to new PV " + pvName + " In some mime responses we insert "
                                + "special headers at the beginning of the response. Calling the hook for "
                                + "that");
                        currentlyProcessingPV = pvName;
                        /*
                         * Goes through the PB data stream over a period of time. The relevant MIME response
                         * actually deal with the processing of the PV. `start` and `end` refer to the very
                         * beginning and very end of the time period being retrieved over, regardless of
                         * whether it is divided up or not.
                         */
                        mergeDedupCountingConsumer.processingPV(currentlyProcessingPV, start, end,
                                (eventStream != null) ? sourceDesc : null);
                    }

                    try {
                        // If the postProcessor does not have a consolidated event stream, we send each eventstream across as we encounter it.
                        // Else we send the consolidatedEventStream down below.
                        if (!(postProcessor instanceof PostProcessorWithConsolidatedEventStream)) {
                            /*
                             * The eventStream object contains all the data over the current period.
                             */
                            mergeDedupCountingConsumer.consumeEventStream(eventStream);
                            resp.flushBuffer();
                        }
                    } catch (Exception ex) {
                        if (ex != null && ex.toString() != null
                                && ex.toString().contains("ClientAbortException")) {
                            // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
                            logger.debug(
                                    "Exception when consuming and flushing data from " + sourceDesc.getSource(),
                                    ex);
                        } else {
                            logger.error("Exception when consuming and flushing data from "
                                    + sourceDesc.getSource() + "-->" + ex.toString(), ex);
                        }
                    }
                    pmansProfiler.mark("After event stream " + eventStream.getDescription().getSource());
                } catch (Exception ex) {
                    if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
                        // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
                        logger.debug("Exception when consuming and flushing data from "
                                + (sourceDesc != null ? sourceDesc.getSource() : "N/A"), ex);
                    } else {
                        logger.error("Exception when consuming and flushing data from "
                                + (sourceDesc != null ? sourceDesc.getSource() : "N/A") + "-->" + ex.toString(),
                                ex);
                    }
                }
            }

            // TODO Go through data from other appliances here

            if (postProcessor instanceof PostProcessorWithConsolidatedEventStream) {
                try (EventStream eventStream = ((PostProcessorWithConsolidatedEventStream) postProcessor)
                        .getConsolidatedEventStream()) {
                    EventStreamDesc sourceDesc = eventStream.getDescription();
                    if (sourceDesc == null) {
                        logger.error("Skipping event stream without a desc for pv " + pvName
                                + " and post processor " + postProcessor.getExtension());
                    } else {
                        mergeDedupCountingConsumer.consumeEventStream(eventStream);
                        resp.flushBuffer();
                    }
                }
            }

            // If the postProcessor needs to send final data across, give it a chance now...
            if (postProcessor instanceof AfterAllStreams) {
                EventStream finalEventStream = ((AfterAllStreams) postProcessor).anyFinalData();
                if (finalEventStream != null) {
                    mergeDedupCountingConsumer.consumeEventStream(finalEventStream);
                    resp.flushBuffer();
                }
            }

            pmansProfiler.mark("After writing all eventstreams to response");
        }
    } catch (Exception ex) {
        if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
            // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
            logger.debug("Exception when retrieving data ", ex);
        } else {
            logger.error("Exception when retrieving data " + "-->" + ex.toString(), ex);
        }
    }

    long s2 = System.currentTimeMillis();
    logger.info("For the complete request, found a total of " + mergeDedupCountingConsumer.totalEventsForAllPVs
            + " in " + (s2 - s1) + "(ms)" + " skipping " + mergeDedupCountingConsumer.skippedEventsForAllPVs
            + " events" + " deduping involved " + mergeDedupCountingConsumer.comparedEventsForAllPVs
            + " compares.");

    pmansProfiler.mark("After all closes and flushing all buffers");

    // Till we determine all the if conditions where we log this, we log sparingly..
    if (pmansProfiler.totalTimeMS() > 5000) {
        logger.error("Retrieval time for " + StringUtils.join(pvNames, ", ") + " from " + startTimeStr + " to "
                + endTimeStr + ": " + pmansProfiler.toString());
    }

    mergeDedupCountingConsumer.close();
}