Example usage for java.util LinkedList getFirst

Introduction

On this page you can find example usage of java.util.LinkedList#getFirst().

Prototype

public E getFirst() 

Document

Returns the first element in this list. Throws NoSuchElementException if the list is empty.
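
Before the real-world examples below, here is a minimal, self-contained sketch of the method's contract (the class and variable names are illustrative, not taken from any of the sources below): getFirst() returns the head of the list without removing it, and throws NoSuchElementException on an empty list, whereas peekFirst() returns null in that case.

import java.util.LinkedList;
import java.util.NoSuchElementException;

public class GetFirstDemo {
    public static void main(String[] args) {
        LinkedList<String> names = new LinkedList<>();
        names.add("alpha");
        names.add("beta");

        // getFirst() returns the head of the list without removing it.
        System.out.println(names.getFirst()); // prints "alpha"
        System.out.println(names.size());     // still 2

        // On an empty list, getFirst() throws; peekFirst() returns null instead.
        names.clear();
        System.out.println(names.peekFirst()); // prints "null"
        try {
            names.getFirst();
        } catch (NoSuchElementException e) {
            System.out.println("getFirst() on an empty list throws NoSuchElementException");
        }
    }
}

This throws-on-empty behavior is why most of the examples below guard the call with a size() or isEmpty() check first.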

Usage

From source file:com.cablelabs.sim.PCSim2.java

/**
 * This method determines whether the global registration needs to be started
 */
private boolean configGlobalRegistrar() {
    globalRegEnabled = SystemSettings.getBooleanSetting("Global Registrar");
    if (globalRegEnabled) {
        Properties platform = SystemSettings.getSettings(SettingConstants.PLATFORM);
        String grName = platform.getProperty(SettingConstants.GLOBAL_REGISTRAR_FSM);
        if (globalRegFile == null) {
            globalRegFile = grName;
        } else if (globalRegFile.equals(grName)) {
            return true;
        } else {
            if (stacks != null)
                stacks.shutdownGlobalRegistrars();
            globalRegFile = grName;
        }
        if (globalRegFile != null) {
            File gr = new File(globalRegFile);
            if (gr.exists() && gr.canRead() && gr.isFile()) {
                TSParser tsp = new TSParser(false);
                try {
                    logger.info(PC2LogCategory.Parser, subCat,
                            "Parsing document " + globalRegFile + " for GlobalRegistrar processing.");
                    TSDocument grDoc = tsp.parse(globalRegFile);
                    LinkedList<FSM> fsms = grDoc.getFsms();
                    if (fsms.size() == 1) {
                        // Initialize the settings that can be overwritten from
                        // within the document 
                        setExtensions(fsms);
                        FSM grFsm = fsms.getFirst();
                        String transport = grFsm.getModel().getProperty(SettingConstants.TRANSPORT_PROTOCOL);
                        Transport t = Transport.UDP;
                        if (transport != null) {
                            if (transport.equals(Transport.UDP.toString()))
                                t = Transport.UDP;
                            else if (transport.equals(Transport.TCP.toString()))
                                t = Transport.TCP;
                            else if (transport.equals(Transport.TLS.toString()))
                                t = Transport.TLS;
                        } else {
                            if (platform != null) {
                                transport = platform.getProperty(SettingConstants.SIP_DEF_TRANPORT_PROTOCOL);
                                if (transport != null) {
                                    if (transport.equals(Transport.UDP.toString()))
                                        t = Transport.UDP;
                                    else if (transport.equals(Transport.TCP.toString()))
                                        t = Transport.TCP;
                                    else if (transport.equals(Transport.TLS.toString()))
                                        t = Transport.TLS;
                                }
                            }
                        }
                        GlobalRegistrar.setMasterFSM(grFsm, t);

                        return true;

                    }
                } catch (PC2XMLException pe) {
                    String err = "\n** Parsing error in file \n    " + pe.getFileName() + " at line "
                            + pe.getLineNumber();
                    if (pe.getSystemId() != null) {
                        err += ", uri " + pe.getSystemId();
                    }
                    if (pe.getPublicId() != null) {
                        err += ", public " + pe.getPublicId();
                    }
                    err += "\n";

                    logger.fatal(PC2LogCategory.Parser, subCat, err, pe);
                } catch (SAXParseException spe) {
                    String err = "\n** Parsing error in file \n    " + globalRegFile + " at line "
                            + spe.getLineNumber();
                    if (spe.getSystemId() != null) {
                        err += ", uri " + spe.getSystemId();
                    }
                    if (spe.getPublicId() != null) {
                        err += ", public " + spe.getPublicId();
                    }
                    err += "\n";

                    logger.fatal(PC2LogCategory.Parser, subCat, err, spe);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            } else {
                //               if (gr == null) {
                //                  logger.fatal(PC2LogCategory.Parser, subCat,
                //                        "The platform configuration file doesn't appear to have a " 
                //                        + "value for the \"Global Registrar FSM\" setting.");
                //               }
                if (!gr.exists()) {
                    logger.fatal(PC2LogCategory.Parser, subCat, "The \"Global Registrar FSM\" setting=[" + gr
                            + "] doesn't appear to define a valid path or file name.");
                }
                if (!gr.canRead()) {
                    logger.fatal(PC2LogCategory.Parser, subCat,
                            "The \"Global Registrar FSM\" setting=[" + gr + "] can not be read by the system.");
                }
                if (!gr.isFile()) {
                    logger.fatal(PC2LogCategory.Parser, subCat, "The \"Global Registrar FSM\" setting=[" + gr
                            + "] doesn't appear to define a file.");
                }
            }

        }
    }
    return false;
}

From source file:org.dbpedia.spotlight.mediawiki.ModularParser.java

/**
 * Builds a ContentElement; this function is called by all the other
 * parseContentElement(..) functions.
 */
private ContentElement parseContentElement(SpanManager sm, ContentElementParsingParameters cepp,
        LinkedList<Span> lineSpans, ContentElement result) {

    List<Link> localLinks = new ArrayList<Link>();
    List<Template> localTemplates = new ArrayList<Template>();

    List<Span> boldSpans = new ArrayList<Span>();
    List<Span> italicSpans = new ArrayList<Span>();
    sm.manageList(boldSpans);
    sm.manageList(italicSpans);

    List<Span> managedSpans = new ArrayList<Span>();
    sm.manageList(managedSpans);

    Span contentElementRange = new Span(lineSpans.getFirst().getStart(), lineSpans.getLast().getEnd()).trim(sm);
    managedSpans.add(contentElementRange);

    // set the SrcSpan
    if (calculateSrcSpans) {
        result.setSrcSpan(new SrcSpan(sm.getSrcPos(contentElementRange.getStart()),
                sm.getSrcPos(contentElementRange.getEnd())));
    }

    sm.manageList(lineSpans);
    while (!lineSpans.isEmpty()) {
        Span line = lineSpans.getFirst();

        parseBoldAndItalicSpans(sm, line, boldSpans, italicSpans);

        // External links
        parseExternalLinks(sm, line, "http://", managedSpans, localLinks, result);
        parseExternalLinks(sm, line, "https://", managedSpans, localLinks, result);
        parseExternalLinks(sm, line, "ftp://", managedSpans, localLinks, result);
        parseExternalLinks(sm, line, "mailto:", managedSpans, localLinks, result);

        // end of line-wise operations
        lineSpans.removeFirst();
    }
    sm.removeManagedList(lineSpans);

    // Links
    int i;
    i = 0;
    while (i < cepp.linkSpans.size()) {
        if (contentElementRange.hits(cepp.linkSpans.get(i))) {
            Span linkSpan = cepp.linkSpans.remove(i);
            managedSpans.add(linkSpan);
            Link l = cepp.links.remove(i).setHomeElement(result);
            localLinks.add(l);
            if (!showImageText && l.getType() == Link.type.IMAGE) {
                // deletes the Image Text from the ContentElement Text.
                sm.delete(linkSpan);
            }
        } else {
            i++;
        }
    }

    // Templates
    // DBpedia Spotlight: removing the boilerplate logic from the wikitext
    // by commenting out the template handling below.
    /*
    i = 0;
    while (i < cepp.templateSpans.size()) {
        Span ts = cepp.templateSpans.get(i);
        if (contentElementRange.hits(ts)) {
            ResolvedTemplate rt = cepp.templates.remove(i);

            if (rt.getPostParseReplacement() != null) {
                sm.replace(ts, rt.getPostParseReplacement());
            }
            cepp.templateSpans.remove(i);

            Object parsedObject = rt.getParsedObject();
            if (parsedObject != null) {
                managedSpans.add(ts);

                Class parsedObjectClass = parsedObject.getClass();
                if (parsedObjectClass == Template.class) {
                    localTemplates.add((Template) parsedObject);
                } else if (parsedObjectClass == Link.class) {
                    localLinks.add(((Link) parsedObject).setHomeElement(result));
                } else {
                    localTemplates.add(rt.getTemplate());
                }
            }
        } else {
            i++;
        }
    }
    */
    // HTML/XML Tags
    i = 0;
    List<Span> tags = new ArrayList<Span>();
    while (i < cepp.tagSpans.size()) {
        Span s = cepp.tagSpans.get(i);
        if (contentElementRange.hits(s)) {
            cepp.tagSpans.remove(i);
            if (deleteTags) {
                sm.delete(s);
            } else {
                tags.add(s);
                managedSpans.add(s);
            }
        } else {
            i++;
        }
    }

    // noWiki
    i = 0;
    List<Span> localNoWikiSpans = new ArrayList<Span>();
    while (i < cepp.noWikiSpans.size()) {
        Span s = cepp.noWikiSpans.get(i);
        if (contentElementRange.hits(s)) {
            cepp.noWikiSpans.remove(i);
            sm.replace(s, cepp.noWikiStrings.remove(i));
            localNoWikiSpans.add(s);
            managedSpans.add(s);
        } else {
            i++;
        }
    }

    // MATH Tags
    i = 0;
    List<Span> mathSpans = new ArrayList<Span>();
    while (i < cepp.mathSpans.size()) {
        Span s = cepp.mathSpans.get(i);
        if (contentElementRange.hits(s)) {
            cepp.mathSpans.remove(i);

            if (showMathTagContent) {
                mathSpans.add(s);
                managedSpans.add(s);
                sm.replace(s, cepp.mathStrings.remove(i));
            } else {
                sm.delete(s);
            }
        } else {
            i++;
        }
    }

    result.setText(sm.substring(contentElementRange));

    // managed spans must be removed here and not earlier, because every
    // change in the SpanManager affects the Spans!
    sm.removeManagedList(boldSpans);
    sm.removeManagedList(italicSpans);
    sm.removeManagedList(managedSpans);

    // contentElementRange is still in managedSpans as well, hence:
    final int adjust = -contentElementRange.getStart();
    for (Span s : boldSpans) {
        s.adjust(adjust);
    }
    for (Span s : italicSpans) {
        s.adjust(adjust);
    }
    for (Span s : managedSpans) {
        s.adjust(adjust);
    }

    result.setFormatSpans(FormatType.BOLD, boldSpans);
    result.setFormatSpans(FormatType.ITALIC, italicSpans);
    result.setFormatSpans(FormatType.TAG, tags);
    result.setFormatSpans(FormatType.MATH, mathSpans);
    result.setFormatSpans(FormatType.NOWIKI, localNoWikiSpans);

    result.setLinks(sortLinks(localLinks));
    result.setTemplates(sortTemplates(localTemplates));

    return result;
}

From source file:cnedu.ustcjd.widget.MultiSlider.java

@Override
public boolean onTouchEvent(MotionEvent event) {
    if (!mIsUserSeekable || !isEnabled()) {
        return false;
    }
    final int xx = Math.round(event.getX());
    final int yy = Math.round(event.getY());

    int pointerIdx = event.getActionIndex();

    Thumb currThumb = null;
    if (event.getActionMasked() == MotionEvent.ACTION_DOWN
            || event.getActionMasked() == MotionEvent.ACTION_POINTER_DOWN) {
        LinkedList<Thumb> closestOnes = getClosestThumb((int) event.getX(pointerIdx));

        if (isInScrollingContainer() && mDraggingThumbs.size() == 0 && exactTouched != null && pointerIdx > 0) {
            //we have been here before => we want to use the bar
            Thumb prevThumb = exactTouched.getFirst();
            onStartTrackingTouch(prevThumb);
            exactTouched = null;
        }

        if (closestOnes != null && !closestOnes.isEmpty()) {
            if (closestOnes.size() == 1) {
                currThumb = closestOnes.getFirst();
                if (isInScrollingContainer() && mDraggingThumbs.size() == 0) {
                    exactTouched = closestOnes;
                }
            } else {
                //we have more than one thumb at the same place and we touched there
                exactTouched = closestOnes;
            }
        }
    } else if (event.getActionMasked() == MotionEvent.ACTION_MOVE) {
        if (exactTouched != null && !exactTouched.isEmpty()) {
            currThumb = getMostMovableThumb(event);
            //check if move actually changed value
            // if (currThumb == null) return false;
        } else if (mDraggingThumbs.size() > pointerIdx) {
            currThumb = mDraggingThumbs.get(pointerIdx);
        }
    } else if (event.getActionMasked() == MotionEvent.ACTION_UP
            || event.getActionMasked() == MotionEvent.ACTION_POINTER_UP) {
        if (mDraggingThumbs.size() > pointerIdx) {
            currThumb = mDraggingThumbs.get(pointerIdx);
        } // else we had a candidate but it was never tracked
        else if (exactTouched != null && exactTouched.size() > 0) {
            currThumb = getMostMovableThumb(event);
            exactTouched = null;
        }
    }
    //        else {
    //            LinkedList<Thumb> closestOnes = getClosestThumb((int) event.getX());
    //            currThumb = closestOnes.getFirst();
    //        }

    switch (event.getActionMasked()) {
    case MotionEvent.ACTION_DOWN:
    case MotionEvent.ACTION_POINTER_DOWN:
        if (isInScrollingContainer() && mDraggingThumbs.size() == 0) {
            mTouchDownX = event.getX(pointerIdx);
        } else {
            onStartTrackingTouch(currThumb);
            setThumbValue(currThumb, getValue(event, currThumb), true);
            setHotspot(xx, yy, currThumb);
        }
        break;
    // with move we don't have a pointer action, so set them all
    case MotionEvent.ACTION_MOVE:
        if (mDraggingThumbs.contains(currThumb)) {
            //need the index
            for (int i = 0; i < mDraggingThumbs.size(); i++) {
                if (mDraggingThumbs.get(i) != null && mDraggingThumbs.get(i).getThumb() != null) {
                    invalidate(mDraggingThumbs.get(i).getThumb().getBounds());
                }
                setThumbValue(mDraggingThumbs.get(i), getValue(event, i, mDraggingThumbs.get(i)), true);

            }
            setHotspot(xx, yy, currThumb);
        } else {
            final float x = event.getX(pointerIdx);
            if (Math.abs(x - mTouchDownX) > mScaledTouchSlop) {
                onStartTrackingTouch(currThumb);
                exactTouched = null;
                setThumbValue(currThumb, getValue(event, currThumb), true);
                setHotspot(xx, yy, currThumb);
            }
        }

        break;

    case MotionEvent.ACTION_UP:
        setPressed(false);
        //there are other pointers left
    case MotionEvent.ACTION_POINTER_UP:
        if (currThumb != null) {
            boolean toUnPress = false;
            if (!isPressed()) {
                setPressed(true);
                toUnPress = true;
            }
            setThumbValue(currThumb, getValue(event, currThumb), true);
            setHotspot(xx, yy, currThumb);
            onStopTrackingTouch(currThumb);
            if (toUnPress) {
                setPressed(false);
            }
        } else {
            //                    currThumb = getClosestThumb(newValue);
            //                    // Touch up when we never crossed the touch slop threshold should
            //                    // be interpreted as a tap-seek to that location.
            //                    onStartTrackingTouch(currThumb);
            //                    setThumbValue(currThumb, newValue, true);
            //                    onStopTrackingTouch(currThumb);
        }
        // ProgressBar doesn't know to repaint the thumb drawable
        // in its inactive state when the touch stops (because the
        // value has not apparently changed)
        invalidate();
        break;
    case MotionEvent.ACTION_CANCEL:
        if (mDraggingThumbs != null) {
            onStopTrackingTouch();
            setPressed(false);
        }
        invalidate(); // see above explanation
        break;
    }
    return true;
}

From source file:com.mirth.connect.server.controllers.MuleEngineController.java

private void configureInboundRouter(UMODescriptor descriptor, Channel channel) throws Exception {
    logger.debug("configuring inbound router for channel: " + channel.getId() + " (" + channel.getName() + ")");
    InboundMessageRouter inboundRouter = new InboundMessageRouter();
    Exception exceptionRegisteringInboundRouter = null;

    MuleEndpoint endpoint = new MuleEndpoint();
    String connectorReference = getConnectorReferenceForInboundRouter(channel);

    // Check if the channel is synchronous
    if ((channel.getProperties().get("synchronous")) != null
            && ((String) channel.getProperties().get("synchronous")).equalsIgnoreCase("true")) {
        endpoint.setSynchronous(true);
    }

    // STEP 1. append the default transformers required by the transport
    // (ex. ByteArrayToString)
    ConnectorMetaData transport = transports.get(channel.getSourceConnector().getTransportName());
    // Initialize to an empty list (rather than null) so the isEmpty() check
    // below cannot throw a NullPointerException when the transport has no
    // default transformers.
    LinkedList<UMOTransformer> transformerList = new LinkedList<UMOTransformer>();

    if (transport.getTransformers() != null) {
        transformerList = chainTransformers(transport.getTransformers());
    }

    // STEP 2. append the preprocessing transformer
    UMOTransformer preprocessorTransformer = createPreprocessor(channel, connectorReference + "_preprocessor");

    try {
        muleManager.registerTransformer(preprocessorTransformer);
    } catch (Exception e) {
        exceptionRegisteringInboundRouter = e;
    }

    if (!transformerList.isEmpty()) {
        transformerList.getLast().setTransformer(preprocessorTransformer);
    } else {
        // there were no default transformers, so make the preprocessor
        // the first transformer in the list
        transformerList.add(preprocessorTransformer);
    }

    // STEP 3. finally, append the JavaScriptTransformer that does the
    // mappings
    UMOTransformer javascriptTransformer = createTransformer(channel, channel.getSourceConnector(),
            connectorReference + "_transformer");

    try {
        muleManager.registerTransformer(javascriptTransformer);
    } catch (Exception e) {
        exceptionRegisteringInboundRouter = e;
    }

    preprocessorTransformer.setTransformer(javascriptTransformer);

    // STEP 4. add the transformer sequence as an attribute to the endpoint
    endpoint.setTransformer(transformerList.getFirst());

    SelectiveConsumer selectiveConsumerRouter = new SelectiveConsumer();
    selectiveConsumerRouter.setFilter(new ValidMessageFilter());
    inboundRouter.addRouter(selectiveConsumerRouter);

    String endpointUri = getEndpointUri(channel.getSourceConnector());

    /*
     * NOTE: Even though every channel already has a VM Connector, we still
     * need to add a Channel Reader connector because of its possible
     * additional properties like "respond from". If a channel reader is
     * being used, add the channel id to the endpointUri so the endpoint can
     * be deployed.
     * 
     * Set the endpoint name to the channelId so
     * InboundMessageRouter#route(UMOEvent event) gets the right channel id.
     */
    if (endpointUri.equals("vm://")) {
        endpointUri += channel.getId();
        endpoint.setName(channel.getId());
        endpoint.setCreateConnector(1);
    } else {
        // add source endpoints
        MuleEndpoint vmEndpoint = new MuleEndpoint();
        vmEndpoint.setEndpointURI(new MuleEndpointURI(new URI("vm://" + channel.getId()).toString()));
        vmEndpoint.setTransformer(preprocessorTransformer);

        /*
         * XXX: Set create connector to true so that channel readers will
         * not use an existing connector (one from a different channel). Not
         * entirely sure why this is required, but if this is set to 0 then
         * a VM EndpointService mbean is created, and when undeploying
         * channels a null pointer is sometimes thrown when calling
         * unregisterComponent(descriptor). The error occurs in
         * AbstractConnector.unregisterListener because receivers is null.
         */
        vmEndpoint.setCreateConnector(1);
        inboundRouter.addEndpoint(vmEndpoint);
    }

    endpoint.setEndpointURI(new MuleEndpointURI(endpointUri, channel.getId()));

    /*
     * MUST BE LAST STEP: Add the source connector last so that if an
     * exception occurs (like creating the URI) it won't register the JMX
     * service.
     * 
     * If there are any exceptions registering the connector, still add the
     * endpoint and inbound router so that the channel can be properly
     * unregistered.
     */
    try {
        endpoint.setConnector(registerConnector(channel.getSourceConnector(),
                getConnectorNameForRouter(connectorReference), channel.getId()));
    } catch (Exception e) {
        exceptionRegisteringInboundRouter = e;
    }

    inboundRouter.addEndpoint(endpoint);

    descriptor.setInboundRouter(inboundRouter);

    if (exceptionRegisteringInboundRouter != null) {
        throw exceptionRegisteringInboundRouter;
    }
}

From source file:tokyo.northside.jrst.JRSTReader.java

/**
 * @param title
 *            LinkedList of title Elements
 * @param num
 *            the section number prefix
 * @return Element
 */
private Element composeLineContent(LinkedList<Element> title, String num) {
    Element result = DocumentHelper.createElement(BULLET_LIST);
    if (sectnum) {
        result.addAttribute(CLASS, "auto-toc");
    }
    Element item = null;
    int cnt = 0;
    while (!title.isEmpty()) {

        Element e = title.getFirst();
        int level = Integer.parseInt(e.attributeValue(LEVEL));
        LinkedList<Element> child = new LinkedList<Element>();

        if (level <= 0) {
            cnt++;
            title.removeFirst();
            item = result.addElement(LIST_ITEM);
            Element para = item.addElement(PARAGRAPH);
            Element reference = para.addElement(REFERENCE);
            String text = e.getText();
            String id = e.attributeValue(ATTR_REFID);
            reference.addAttribute(ATTR_IDS, id);
            reference.addAttribute(ATTR_REFID,
                    text.replaceAll("\\W+", " ").trim().toLowerCase().replaceAll("\\W+", "-"));
            // if the entries need to be numbered
            if (sectnum) {
                Element generated = reference.addElement(GENERATED).addAttribute(CLASS, SECTNUM);
                generated.setText(num + cnt + "   ");
                for (int i = 0; i < eTitle.size(); i++) {
                    if (eTitle.get(i).attributeValue(ATTR_REFID).equals(id)) {
                        Element generatedTitle = eTitle.get(i).addElement(GENERATED);
                        generatedTitle.addAttribute(CLASS, SECTNUM);
                        generatedTitle.setText(num + cnt + "   ");
                    }

                }
            }

            text = text.trim();
            text = text.replaceAll("_", "");

            text = REGEX_STRONG.matcher(text).replaceAll("<" + STRONG + ">$1</" + STRONG + ">");
            text = REGEX_EMPHASIS.matcher(text).replaceAll("<" + EMPHASIS + ">$1</" + EMPHASIS + ">");

            try {
                Element textElement = DocumentHelper.parseText("<TMP>" + text + "</TMP>").getRootElement();
                reference.appendContent(textElement);

            } catch (DocumentException eee) {
                if (log.isWarnEnabled()) {
                    log.warn("Can't inline text for " + e, eee);
                }
            }

        } else {
            do {
                e.addAttribute(LEVEL, "" + (level - 1));
                child.add(e);
                title.removeFirst();
                if (!title.isEmpty()) {
                    e = title.getFirst();
                    level = Integer.parseInt(e.attributeValue(LEVEL));
                }
            } while (!title.isEmpty() && level > 0);
            String numTmp = "";
            // numbering
            if (sectnum) {
                numTmp = num + cnt + ".";
            }
            if (item != null) {
                item.add(composeLineContent(child, numTmp)); // recursive call
            } else {
                result.add(composeLineContent(child, numTmp)); // recursive call
            }
        }
    }
    return result;
}

From source file:org.epics.archiverappliance.retrieval.DataRetrievalServlet.java

private void doGetSinglePV(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {

    PoorMansProfiler pmansProfiler = new PoorMansProfiler();
    String pvName = req.getParameter("pv");

    if (configService.getStartupState() != STARTUP_SEQUENCE.STARTUP_COMPLETE) {
        String msg = "Cannot process data retrieval requests for PV " + pvName
                + " until the appliance has completely started up.";
        logger.error(msg);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, msg);
        return;
    }

    String startTimeStr = req.getParameter("from");
    String endTimeStr = req.getParameter("to");
    boolean useReduced = false;
    String useReducedStr = req.getParameter("usereduced");
    if (useReducedStr != null && !useReducedStr.equals("")) {
        try {
            useReduced = Boolean.parseBoolean(useReducedStr);
        } catch (Exception ex) {
            logger.error("Exception parsing usereduced", ex);
            useReduced = false;
        }
    }
    String extension = req.getPathInfo().split("\\.")[1];
    logger.info("Mime is " + extension);

    boolean useChunkedEncoding = true;
    String doNotChunkStr = req.getParameter("donotchunk");
    if (doNotChunkStr != null && !doNotChunkStr.equals("false")) {
        logger.info("Turning off HTTP chunked encoding");
        useChunkedEncoding = false;
    }

    boolean fetchLatestMetadata = false;
    String fetchLatestMetadataStr = req.getParameter("fetchLatestMetadata");
    if (fetchLatestMetadataStr != null && fetchLatestMetadataStr.equals("true")) {
        logger.info("Adding a call to the engine to fetch the latest metadata");
        fetchLatestMetadata = true;
    }

    // For data retrieval we need a PV info. However, in the case of PVs that have long since retired, we may not want to have PVTypeInfos in the system.
    // So, we support a template PV that lays out the data sources.
    // During retrieval, you can pass in the PV as a template and we'll clone this and make a temporary copy.
    String retiredPVTemplate = req.getParameter("retiredPVTemplate");

    if (pvName == null) {
        String msg = "PV name is null.";
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
        return;
    }

    if (pvName.equals(ARCH_APPL_PING_PV)) {
        logger.debug("Processing ping PV - this is used to validate the connection with the client.");
        processPingPV(req, resp);
        return;
    }

    if (pvName.endsWith(".VAL")) {
        int len = pvName.length();
        pvName = pvName.substring(0, len - 4);
        logger.info("Removing .VAL from pvName for request giving " + pvName);
    }

    // ISO datetimes are of the form "2011-02-02T08:00:00.000Z"
    Timestamp end = TimeUtils.plusHours(TimeUtils.now(), 1);
    if (endTimeStr != null) {
        try {
            end = TimeUtils.convertFromISO8601String(endTimeStr);
        } catch (IllegalArgumentException ex) {
            try {
                end = TimeUtils.convertFromDateTimeStringWithOffset(endTimeStr);
            } catch (IllegalArgumentException ex2) {
                String msg = "Cannot parse time" + endTimeStr;
                logger.warn(msg, ex2);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
        }
    }

    // We get one day by default
    Timestamp start = TimeUtils.minusDays(end, 1);
    if (startTimeStr != null) {
        try {
            start = TimeUtils.convertFromISO8601String(startTimeStr);
        } catch (IllegalArgumentException ex) {
            try {
                start = TimeUtils.convertFromDateTimeStringWithOffset(startTimeStr);
            } catch (IllegalArgumentException ex2) {
                String msg = "Cannot parse time " + startTimeStr;
                logger.warn(msg, ex2);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
        }
    }

    if (end.before(start)) {
        String msg = "For request, end " + end.toString() + " is before start " + start.toString() + " for pv "
                + pvName;
        logger.error(msg);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
        return;
    }

    LinkedList<TimeSpan> requestTimes = new LinkedList<TimeSpan>();

    // We can specify a list of time stamp pairs using the optional timeranges parameter
    String timeRangesStr = req.getParameter("timeranges");
    if (timeRangesStr != null) {
        boolean continueWithRequest = parseTimeRanges(resp, pvName, requestTimes, timeRangesStr);
        if (!continueWithRequest) {
            // Cannot parse the time ranges properly, so we abort the request.
            return;
        }

        // Override the start and the end so that the mergededup consumer works correctly.
        start = requestTimes.getFirst().getStartTime();
        end = requestTimes.getLast().getEndTime();

    } else {
        requestTimes.add(new TimeSpan(start, end));
    }

    assert (requestTimes.size() > 0);

    String postProcessorUserArg = req.getParameter("pp");
    if (pvName.contains("(")) {
        if (!pvName.contains(")")) {
            logger.error("Unbalanced paran " + pvName);
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST);
            return;
        }
        String[] components = pvName.split("[(,)]");
        postProcessorUserArg = components[0];
        pvName = components[1];
        if (components.length > 2) {
            for (int i = 2; i < components.length; i++) {
                postProcessorUserArg = postProcessorUserArg + "_" + components[i];
            }
        }
        logger.info("After parsing the function call syntax pvName is " + pvName
                + " and postProcessorUserArg is " + postProcessorUserArg);
    }

    PostProcessor postProcessor = PostProcessors.findPostProcessor(postProcessorUserArg);

    PVTypeInfo typeInfo = PVNames.determineAppropriatePVTypeInfo(pvName, configService);
    pmansProfiler.mark("After PVTypeInfo");

    if (typeInfo == null && RetrievalState.includeExternalServers(req)) {
        logger.debug("Checking to see if pv " + pvName + " is served by a external Archiver Server");
        typeInfo = checkIfPVisServedByExternalServer(pvName, start, req, resp, useChunkedEncoding);
    }

    if (typeInfo == null) {
        if (resp.isCommitted()) {
            logger.debug("Proxied the data thru an external server for PV " + pvName);
            return;
        }
    }

    if (typeInfo == null) {
        if (retiredPVTemplate != null) {
            PVTypeInfo templateTypeInfo = PVNames.determineAppropriatePVTypeInfo(retiredPVTemplate,
                    configService);
            if (templateTypeInfo != null) {
                typeInfo = new PVTypeInfo(pvName, templateTypeInfo);
                typeInfo.setPaused(true);
                typeInfo.setApplianceIdentity(configService.getMyApplianceInfo().getIdentity());
                // Somehow tell the code downstream that this is a fake typeInfo.
                typeInfo.setSamplingMethod(SamplingMethod.DONT_ARCHIVE);
                logger.debug("Using a template PV for " + pvName + " Need to determine the actual DBR type.");
                setActualDBRTypeFromData(pvName, typeInfo, configService);
            }
        }
    }

    if (typeInfo == null) {
        logger.error("Unable to find typeinfo for pv " + pvName);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_NOT_FOUND);
        return;
    }

    if (postProcessor == null) {
        if (useReduced) {
            String defaultPPClassName = configService.getInstallationProperties().getProperty(
                    "org.epics.archiverappliance.retrieval.DefaultUseReducedPostProcessor",
                    FirstSamplePP.class.getName());
            logger.debug("Using the default usereduced preprocessor " + defaultPPClassName);
            try {
                postProcessor = (PostProcessor) Class.forName(defaultPPClassName).newInstance();
            } catch (Exception ex) {
                logger.error("Exception constructing new instance of post processor " + defaultPPClassName, ex);
                postProcessor = null;
            }
        }
    }

    if (postProcessor == null) {
        logger.debug("Using the default raw preprocessor");
        postProcessor = new DefaultRawPostProcessor();
    }

    ApplianceInfo applianceForPV = configService.getApplianceForPV(pvName);
    if (applianceForPV == null) {
        // TypeInfo cannot be null here...
        assert (typeInfo != null);
        applianceForPV = configService.getAppliance(typeInfo.getApplianceIdentity());
    }

    if (!applianceForPV.equals(configService.getMyApplianceInfo())) {
        // Data for pv is elsewhere. Proxy/redirect and return.
        proxyRetrievalRequest(req, resp, pvName, useChunkedEncoding,
                applianceForPV.getRetrievalURL() + "/../data");
        return;
    }

    pmansProfiler.mark("After Appliance Info");

    String pvNameFromRequest = pvName;

    String fieldName = PVNames.getFieldName(pvName);
    if (fieldName != null && !fieldName.equals("") && !pvName.equals(typeInfo.getPvName())) {
        logger.debug("We reset the pvName " + pvName + " to one from the typeinfo " + typeInfo.getPvName()
                + " as that determines the name of the stream. Also using ExtraFieldsPostProcessor");
        pvName = typeInfo.getPvName();
        postProcessor = new ExtraFieldsPostProcessor(fieldName);
    }

    try {
        // Postprocessors get their mandatory arguments from the request.
        // If user does not pass in the expected request, throw an exception.
        postProcessor.initialize(postProcessorUserArg, pvName);
    } catch (Exception ex) {
        logger.error("Postprocessor threw an exception during initialization for " + pvName, ex);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_NOT_FOUND);
        return;
    }

    try (BasicContext retrievalContext = new BasicContext(typeInfo.getDBRType(), pvNameFromRequest);
            MergeDedupConsumer mergeDedupCountingConsumer = createMergeDedupConsumer(resp, extension,
                    useChunkedEncoding);
            RetrievalExecutorResult executorResult = determineExecutorForPostProcessing(pvName, typeInfo,
                    requestTimes, req, postProcessor)) {
        HashMap<String, String> engineMetadata = null;
        if (fetchLatestMetadata) {
            // Make a call to the engine to fetch the latest metadata.
            engineMetadata = fetchLatestMedataFromEngine(pvName, applianceForPV);
        }

        LinkedList<Future<RetrievalResult>> retrievalResultFutures = resolveAllDataSources(pvName, typeInfo,
                postProcessor, applianceForPV, retrievalContext, executorResult, req, resp);
        pmansProfiler.mark("After data source resolution");

        long s1 = System.currentTimeMillis();
        String currentlyProcessingPV = null;

        List<Future<EventStream>> eventStreamFutures = getEventStreamFuturesFromRetrievalResults(executorResult,
                retrievalResultFutures);

        logger.debug(
                "Done with the RetrievalResult's; moving onto the individual event stream from each source for "
                        + pvName);
        pmansProfiler.mark("After retrieval results");

        for (Future<EventStream> future : eventStreamFutures) {
            EventStreamDesc sourceDesc = null;
            try (EventStream eventStream = future.get()) {
                sourceDesc = null; // Reset it for each loop iteration.
                sourceDesc = eventStream.getDescription();
                if (sourceDesc == null) {
                    logger.warn("Skipping event stream without a desc for pv " + pvName);
                    continue;
                }

                logger.debug("Processing event stream for pv " + pvName + " from source "
                        + ((eventStream.getDescription() != null) ? eventStream.getDescription().getSource()
                                : " unknown"));

                try {
                    mergeTypeInfo(typeInfo, sourceDesc, engineMetadata);
                } catch (MismatchedDBRTypeException mex) {
                    logger.error(mex.getMessage(), mex);
                    continue;
                }

                if (currentlyProcessingPV == null || !currentlyProcessingPV.equals(pvName)) {
                    logger.debug("Switching to new PV " + pvName
                            + " In some mime responses we insert special headers at the beginning of the response. Calling the hook for that");
                    currentlyProcessingPV = pvName;
                    mergeDedupCountingConsumer.processingPV(currentlyProcessingPV, start, end,
                            (eventStream != null) ? sourceDesc : null);
                }

                try {
                    // If the postProcessor does not have a consolidated event stream, we send each eventstream across as we encounter it.
                    // Else we send the consolidatedEventStream down below.
                    if (!(postProcessor instanceof PostProcessorWithConsolidatedEventStream)) {
                        mergeDedupCountingConsumer.consumeEventStream(eventStream);
                        resp.flushBuffer();
                    }
                } catch (Exception ex) {
                    if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
                        // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
                        logger.debug(
                                "Exception when consuming and flushing data from " + sourceDesc.getSource(),
                                ex);
                    } else {
                        logger.error("Exception when consuming and flushing data from " + sourceDesc.getSource()
                                + "-->" + ex.toString(), ex);
                    }
                }
                pmansProfiler.mark("After event stream " + eventStream.getDescription().getSource());
            } catch (Exception ex) {
                if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
                    // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
                    logger.debug("Exception when consuming and flushing data from "
                            + (sourceDesc != null ? sourceDesc.getSource() : "N/A"), ex);
                } else {
                    logger.error("Exception when consuming and flushing data from "
                            + (sourceDesc != null ? sourceDesc.getSource() : "N/A") + "-->" + ex.toString(),
                            ex);
                }
            }
        }

        if (postProcessor instanceof PostProcessorWithConsolidatedEventStream) {
            try (EventStream eventStream = ((PostProcessorWithConsolidatedEventStream) postProcessor)
                    .getConsolidatedEventStream()) {
                EventStreamDesc sourceDesc = eventStream.getDescription();
                if (sourceDesc == null) {
                    logger.error("Skipping event stream without a desc for pv " + pvName
                            + " and post processor " + postProcessor.getExtension());
                } else {
                    mergeDedupCountingConsumer.consumeEventStream(eventStream);
                    resp.flushBuffer();
                }
            }
        }

        // If the postProcessor needs to send final data across, give it a chance now...
        if (postProcessor instanceof AfterAllStreams) {
            EventStream finalEventStream = ((AfterAllStreams) postProcessor).anyFinalData();
            if (finalEventStream != null) {
                mergeDedupCountingConsumer.consumeEventStream(finalEventStream);
                resp.flushBuffer();
            }
        }

        pmansProfiler.mark("After writing all eventstreams to response");

        long s2 = System.currentTimeMillis();
        logger.info("For the complete request, found a total of "
                + mergeDedupCountingConsumer.totalEventsForAllPVs + " in " + (s2 - s1) + "(ms)" + " skipping "
                + mergeDedupCountingConsumer.skippedEventsForAllPVs + " events" + " deduping involved "
                + mergeDedupCountingConsumer.comparedEventsForAllPVs + " compares.");
    } catch (Exception ex) {
        if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
            // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
            logger.debug("Exception when retrieving data ", ex);
        } else {
            logger.error("Exception when retrieving data " + "-->" + ex.toString(), ex);
        }
    }
    pmansProfiler.mark("After all closes and flushing all buffers");

    // Until we determine all the conditions under which we log this, we log sparingly.
    if (pmansProfiler.totalTimeMS() > 5000) {
        logger.error("Retrieval time for " + pvName + " from " + startTimeStr + " to " + endTimeStr
                + pmansProfiler.toString());
    }
}

From source file:com.zimbra.cs.service.mail.ToXML.java

private static void addParts(Element root, MPartInfo mpiRoot, Set<MPartInfo> bodies, String prefix, int maxSize,
        boolean neuter, boolean excludeCalendarParts, String defaultCharset, boolean swallowContentExceptions,
        MsgContent wantContent) throws ServiceException {
    MPartInfo mpi = mpiRoot;
    LinkedList<Pair<Element, LinkedList<MPartInfo>>> queue = new LinkedList<Pair<Element, LinkedList<MPartInfo>>>();
    Pair<Element, LinkedList<MPartInfo>> level = new Pair<Element, LinkedList<MPartInfo>>(root,
            new LinkedList<MPartInfo>());
    level.getSecond().add(mpi);
    queue.add(level);

    VisitPhase phase = VisitPhase.PREVISIT;
    while (!queue.isEmpty()) {
        level = queue.getLast();
        LinkedList<MPartInfo> parts = level.getSecond();
        if (parts.isEmpty()) {
            queue.removeLast();
            phase = VisitPhase.POSTVISIT;
            continue;
        }

        mpi = parts.getFirst();
        Element child = addPart(phase, level.getFirst(), root, mpi, bodies, prefix, maxSize, neuter,
                excludeCalendarParts, defaultCharset, swallowContentExceptions, wantContent);
        if (phase == VisitPhase.PREVISIT && child != null && mpi.hasChildren()) {
            queue.addLast(new Pair<Element, LinkedList<MPartInfo>>(child,
                    new LinkedList<MPartInfo>(mpi.getChildren())));
        } else {
            parts.removeFirst();
            phase = VisitPhase.PREVISIT;
        }
    }
}

From source file:org.epics.archiverappliance.retrieval.DataRetrievalServlet.java

private void doGetMultiPV(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {

    PoorMansProfiler pmansProfiler = new PoorMansProfiler();

    // Gets the list of PVs specified by the `pv` parameter
    // String arrays might be inefficient for retrieval. In any case, they are sorted, which is essential later on.
    List<String> pvNames = Arrays.asList(req.getParameterValues("pv"));

    // Ensuring that the AA has finished starting up before requests are accepted.
    if (configService.getStartupState() != STARTUP_SEQUENCE.STARTUP_COMPLETE) {
        String msg = "Cannot process data retrieval requests for specified PVs ("
                + StringUtils.join(pvNames, ", ") + ") until the appliance has completely started up.";
        logger.error(msg);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, msg);
        return;
    }

    // Getting various fields from arguments
    String startTimeStr = req.getParameter("from");
    String endTimeStr = req.getParameter("to");
    boolean useReduced = false;
    String useReducedStr = req.getParameter("usereduced");
    if (useReducedStr != null && !useReducedStr.equals("")) {
        try {
            useReduced = Boolean.parseBoolean(useReducedStr);
        } catch (Exception ex) {
            logger.error("Exception parsing usereduced", ex);
            useReduced = false;
        }
    }

    // Getting MIME type
    String extension = req.getPathInfo().split("\\.")[1];
    logger.info("Mime is " + extension);

    if (!extension.equals("json") && !extension.equals("raw") && !extension.equals("jplot")
            && !extension.equals("qw")) {
        String msg = "Mime type " + extension + " is not supported. Please use \"json\", \"jplot\" or \"raw\".";
        resp.setHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
        return;
    }

    boolean useChunkedEncoding = true;
    String doNotChunkStr = req.getParameter("donotchunk");
    if (doNotChunkStr != null && !doNotChunkStr.equals("false")) {
        logger.info("Turning off HTTP chunked encoding");
        useChunkedEncoding = false;
    }

    boolean fetchLatestMetadata = false;
    String fetchLatestMetadataStr = req.getParameter("fetchLatestMetadata");
    if (fetchLatestMetadataStr != null && fetchLatestMetadataStr.equals("true")) {
        logger.info("Adding a call to the engine to fetch the latest metadata");
        fetchLatestMetadata = true;
    }

    // For data retrieval we need a PV info. However, in the case of PVs that have long since retired, we may not want to have PVTypeInfos in the system.
    // So, we support a template PV that lays out the data sources.
    // During retrieval, you can pass in the PV as a template and we'll clone this and make a temporary copy.
    String retiredPVTemplate = req.getParameter("retiredPVTemplate");

    // Go through the given PVs and return a bad-request error if any of them is null.
    int nullPVs = 0;
    for (String pvName : pvNames) {
        if (pvName == null) {
            nullPVs++;
        }
        if (nullPVs > 0) {
            logger.warn("Some PVs are null in the request.");
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST);
            return;
        }
    }

    if (pvNames.toString().matches("^.*" + ARCH_APPL_PING_PV + ".*$")) {
        logger.debug("Processing ping PV - this is used to validate the connection with the client.");
        processPingPV(req, resp);
        return;
    }

    for (int i = 0; i < pvNames.size(); i++) {
        String pvName = pvNames.get(i);
        if (pvName.endsWith(".VAL")) {
            int len = pvName.length();
            // Write the trimmed name back into the list; reassigning the loop
            // variable alone would leave pvNames unchanged.
            pvNames.set(i, pvName.substring(0, len - 4));
            logger.info("Removing .VAL from pvName for request giving " + pvNames.get(i));
        }
    }

    // ISO datetimes are of the form "2011-02-02T08:00:00.000Z"
    Timestamp end = TimeUtils.plusHours(TimeUtils.now(), 1);
    if (endTimeStr != null) {
        try {
            end = TimeUtils.convertFromISO8601String(endTimeStr);
        } catch (IllegalArgumentException ex) {
            try {
                end = TimeUtils.convertFromDateTimeStringWithOffset(endTimeStr);
            } catch (IllegalArgumentException ex2) {
                String msg = "Cannot parse time " + endTimeStr;
                logger.warn(msg, ex2);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
        }
    }

    // We get one day by default
    Timestamp start = TimeUtils.minusDays(end, 1);
    if (startTimeStr != null) {
        try {
            start = TimeUtils.convertFromISO8601String(startTimeStr);
        } catch (IllegalArgumentException ex) {
            try {
                start = TimeUtils.convertFromDateTimeStringWithOffset(startTimeStr);
            } catch (IllegalArgumentException ex2) {
                String msg = "Cannot parse time " + startTimeStr;
                logger.warn(msg, ex2);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
        }
    }

    if (end.before(start)) {
        String msg = "For request, end " + end.toString() + " is before start " + start.toString() + " for pvs "
                + StringUtils.join(pvNames, ", ");
        logger.error(msg);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
        return;
    }

    LinkedList<TimeSpan> requestTimes = new LinkedList<TimeSpan>();

    // We can specify a list of time stamp pairs using the optional timeranges parameter
    String timeRangesStr = req.getParameter("timeranges");
    if (timeRangesStr != null) {
        boolean continueWithRequest = parseTimeRanges(resp, "[" + StringUtils.join(pvNames, ", ") + "]",
                requestTimes, timeRangesStr);
        if (!continueWithRequest) {
            // Cannot parse the time ranges properly, so we abort the request.
            String msg = "The specified time ranges could not be processed appropriately. Aborting.";
            logger.info(msg);
            resp.setHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
            return;
        }

        // Override the start and the end so that the mergededup consumer works correctly.
        start = requestTimes.getFirst().getStartTime();
        end = requestTimes.getLast().getEndTime();

    } else {
        requestTimes.add(new TimeSpan(start, end));
    }

    assert (requestTimes.size() > 0);

    // Get a post processor for each PV specified in pvNames
    // If PV in the form <pp>(<pv>), process it
    String postProcessorUserArg = req.getParameter("pp");
    List<String> postProcessorUserArgs = new ArrayList<>(pvNames.size());
    List<PostProcessor> postProcessors = new ArrayList<>(pvNames.size());
    for (int i = 0; i < pvNames.size(); i++) {
        postProcessorUserArgs.add(postProcessorUserArg);

        if (pvNames.get(i).contains("(")) {
            if (!pvNames.get(i).contains(")")) {
                String msg = "Unbalanced paren " + pvNames.get(i);
                logger.error(msg);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
            String[] components = pvNames.get(i).split("[(,)]");
            postProcessorUserArg = components[0];
            postProcessorUserArgs.set(i, postProcessorUserArg);
            pvNames.set(i, components[1]);
            if (components.length > 2) {
                for (int j = 2; j < components.length; j++) {
                    postProcessorUserArgs.set(i, postProcessorUserArgs.get(i) + "_" + components[j]);
                }
            }
            logger.info("After parsing the function call syntax pvName is " + pvNames.get(i)
                    + " and postProcessorUserArg is " + postProcessorUserArg);
        }
        postProcessors.add(PostProcessors.findPostProcessor(postProcessorUserArg));
    }

    List<PVTypeInfo> typeInfos = new ArrayList<PVTypeInfo>(pvNames.size());
    for (int i = 0; i < pvNames.size(); i++) {
        typeInfos.add(PVNames.determineAppropriatePVTypeInfo(pvNames.get(i), configService));
    }
    pmansProfiler.mark("After PVTypeInfo");

    for (int i = 0; i < pvNames.size(); i++)
        if (typeInfos.get(i) == null && RetrievalState.includeExternalServers(req)) {
            logger.debug(
                    "Checking to see if pv " + pvNames.get(i) + " is served by a external Archiver Server");
            typeInfos.set(i,
                    checkIfPVisServedByExternalServer(pvNames.get(i), start, req, resp, useChunkedEncoding));
        }

    for (int i = 0; i < pvNames.size(); i++) {
        if (typeInfos.get(i) == null) {
            // TODO Only needed if we're forwarding the request to another server.
            if (resp.isCommitted()) {
                logger.debug("Proxied the data thru an external server for PV " + pvNames.get(i));
                return;
            }

            if (retiredPVTemplate != null) {
                PVTypeInfo templateTypeInfo = PVNames.determineAppropriatePVTypeInfo(retiredPVTemplate,
                        configService);
                if (templateTypeInfo != null) {
                    typeInfos.set(i, new PVTypeInfo(pvNames.get(i), templateTypeInfo));
                    typeInfos.get(i).setPaused(true);
                    typeInfos.get(i).setApplianceIdentity(configService.getMyApplianceInfo().getIdentity());
                    // Somehow tell the code downstream that this is a fake typeInfo.
                    typeInfos.get(i).setSamplingMethod(SamplingMethod.DONT_ARCHIVE);
                    logger.debug("Using a template PV for " + pvNames.get(i)
                            + " Need to determine the actual DBR type.");
                    setActualDBRTypeFromData(pvNames.get(i), typeInfos.get(i), configService);
                }
            }
        }

        if (typeInfos.get(i) == null) {
            String msg = "Unable to find typeinfo for pv " + pvNames.get(i);
            logger.error(msg);
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_NOT_FOUND, msg);
            return;
        }

        if (postProcessors.get(i) == null) {
            if (useReduced) {
                String defaultPPClassName = configService.getInstallationProperties().getProperty(
                        "org.epics.archiverappliance.retrieval.DefaultUseReducedPostProcessor",
                        FirstSamplePP.class.getName());
                logger.debug("Using the default usereduced preprocessor " + defaultPPClassName);
                try {
                    postProcessors.set(i, (PostProcessor) Class.forName(defaultPPClassName).newInstance());
                } catch (Exception ex) {
                    logger.error("Exception constructing new instance of post processor " + defaultPPClassName,
                            ex);
                    postProcessors.set(i, null);
                }
            }
        }

        if (postProcessors.get(i) == null) {
            logger.debug("Using the default raw preprocessor");
            postProcessors.set(i, new DefaultRawPostProcessor());
        }
    }

    // Get the appliances for each of the PVs
    List<ApplianceInfo> applianceForPVs = new ArrayList<ApplianceInfo>(pvNames.size());
    for (int i = 0; i < pvNames.size(); i++) {
        applianceForPVs.add(configService.getApplianceForPV(pvNames.get(i)));
        if (applianceForPVs.get(i) == null) {
            // TypeInfo cannot be null here...
            assert (typeInfos.get(i) != null);
            applianceForPVs.set(i, configService.getAppliance(typeInfos.get(i).getApplianceIdentity()));
        }
    }

    /*
     * If a PV is not assigned to this appliance, group it by the external appliance that
     * serves it, so that all PVs for a given appliance can be retrieved in one call.
     */
    Map<String, ArrayList<PVInfoForClusterRetrieval>> applianceToPVs = new HashMap<String, ArrayList<PVInfoForClusterRetrieval>>();
    for (int i = 0; i < pvNames.size(); i++) {
        if (!applianceForPVs.get(i).equals(configService.getMyApplianceInfo())) {

            ArrayList<PVInfoForClusterRetrieval> appliancePVs = applianceToPVs
                    .get(applianceForPVs.get(i).getRetrievalURL());
            appliancePVs = (appliancePVs == null) ? new ArrayList<>() : appliancePVs;
            PVInfoForClusterRetrieval pvInfoForRetrieval = new PVInfoForClusterRetrieval(pvNames.get(i),
                    typeInfos.get(i), postProcessors.get(i), applianceForPVs.get(i));
            appliancePVs.add(pvInfoForRetrieval);
            applianceToPVs.put(applianceForPVs.get(i).getRetrievalURL(), appliancePVs);
        }
    }

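    // Accumulates event stream futures from external appliances (below) and from local data source resolution (further down).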
    List<List<Future<EventStream>>> listOfEventStreamFuturesLists = new ArrayList<List<Future<EventStream>>>();
    Set<String> retrievalURLs = applianceToPVs.keySet();
    if (!retrievalURLs.isEmpty()) {
        // Redirect each batch of PVs to the appliance that serves them.
        for (String retrievalURL : retrievalURLs) {
            // Get the list of PVs grouped under this appliance
            ArrayList<PVInfoForClusterRetrieval> pvInfos = applianceToPVs.get(retrievalURL);
            try {
                List<List<Future<EventStream>>> resultFromForeignAppliances = retrieveEventStreamFromForeignAppliance(
                        req, resp, pvInfos, requestTimes, useChunkedEncoding,
                        retrievalURL + "/../data/getDataForPVs.raw", start, end);
                listOfEventStreamFuturesLists.addAll(resultFromForeignAppliances);
            } catch (Exception ex) {
                logger.error("Failed to retrieve " + StringUtils.join(pvNames, ", ") + " from " + retrievalURL
                        + ".");
                return;
            }
        }
    }

    pmansProfiler.mark("After Appliance Info");

    // Setting post processor for PVs, taking into account whether there is a field in the PV name
    List<String> pvNamesFromRequests = new ArrayList<String>(pvNames.size());
    for (int i = 0; i < pvNames.size(); i++) {
        String pvName = pvNames.get(i);
        pvNamesFromRequests.add(pvName);
        PVTypeInfo typeInfo = typeInfos.get(i);
        postProcessorUserArg = postProcessorUserArgs.get(i);

        // If a field is specified in the PV name, use a post processor that extracts that field.
        String fieldName = PVNames.getFieldName(pvName);
        if (fieldName != null && !fieldName.equals("") && !pvName.equals(typeInfo.getPvName())) {
            logger.debug("We reset the pvName " + pvName + " to one from the typeinfo " + typeInfo.getPvName()
                    + " as that determines the name of the stream. " + "Also using ExtraFieldsPostProcessor.");
            pvNames.set(i, typeInfo.getPvName());
            postProcessors.set(i, new ExtraFieldsPostProcessor(fieldName));
        }

        try {
            // Postprocessors get their mandatory arguments from the request;
            // if the user does not pass in the expected arguments, initialization throws.
            postProcessors.get(i).initialize(postProcessorUserArg, pvName);
        } catch (Exception ex) {
            String msg = "Postprocessor threw an exception during initialization for " + pvName;
            logger.error(msg, ex);
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_NOT_FOUND, msg);
            return;
        }
    }

    /*
     * MergeDedupConsumer is what writes the PB data, in the requested format, to the
     * HTTP response. Once the MergeDedupConsumer has been created, the response carries:
     * 
     * 1) The content type for the response.
     * 2) Any additional headers for the particular MIME response.
     * 
     * Additionally, the MergeDedupConsumer instance holds a reference to the output stream
     * that is used to write to the HTTP response. It is stored under the name `os`.
     */
    MergeDedupConsumer mergeDedupCountingConsumer;
    try {
        mergeDedupCountingConsumer = createMergeDedupConsumer(resp, extension, useChunkedEncoding);
    } catch (ServletException se) {
        String msg = "Exception when retrieving data " + "-->" + se.toString();
        logger.error(msg, se);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, msg);
        return;
    }

    /* 
     * BasicContext contains the PV name and the expected return type. Used to access PB files.
     * RetrievalExecutorResult contains a thread service class and the time spans. Presumably,
     * the thread service is what retrieves the data, and the BasicContext is the context in
     * which it works.
     */
    List<HashMap<String, String>> engineMetadatas = new ArrayList<HashMap<String, String>>();
    try {
        List<BasicContext> retrievalContexts = new ArrayList<BasicContext>(pvNames.size());
        List<RetrievalExecutorResult> executorResults = new ArrayList<RetrievalExecutorResult>(pvNames.size());
        for (int i = 0; i < pvNames.size(); i++) {
            if (fetchLatestMetadata) {
                // Make a call to the engine to fetch the latest metadata.
                engineMetadatas.add(fetchLatestMedataFromEngine(pvNames.get(i), applianceForPVs.get(i)));
            }
            retrievalContexts.add(new BasicContext(typeInfos.get(i).getDBRType(), pvNamesFromRequests.get(i)));
            executorResults.add(determineExecutorForPostProcessing(pvNames.get(i), typeInfos.get(i),
                    requestTimes, req, postProcessors.get(i)));
        }

        /*
         * There are as many Future objects in the eventStreamFutures list as there are periods
         * over which to fetch data. Retrieval of the data happens here, in parallel.
         */
        List<LinkedList<Future<RetrievalResult>>> listOfRetrievalResultFuturesLists = new ArrayList<LinkedList<Future<RetrievalResult>>>();
        for (int i = 0; i < pvNames.size(); i++) {
            listOfRetrievalResultFuturesLists.add(resolveAllDataSources(pvNames.get(i), typeInfos.get(i),
                    postProcessors.get(i), applianceForPVs.get(i), retrievalContexts.get(i),
                    executorResults.get(i), req, resp));
        }
        pmansProfiler.mark("After data source resolution");

        for (int i = 0; i < pvNames.size(); i++) {
            // Data is retrieved here
            List<Future<EventStream>> eventStreamFutures = getEventStreamFuturesFromRetrievalResults(
                    executorResults.get(i), listOfRetrievalResultFuturesLists.get(i));
            listOfEventStreamFuturesLists.add(eventStreamFutures);
        }

    } catch (Exception ex) {
        if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
            // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
            logger.debug("Exception when retrieving data ", ex);
        } else {
            logger.error("Exception when retrieving data " + "-->" + ex.toString(), ex);
        }
    }

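    // Time the merge/dedup/consume phase; the totals are logged once all streams have been written.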
    long s1 = System.currentTimeMillis();
    String currentlyProcessingPV = null;

    /*
     * The following try block goes through each of the streams in the list of event stream
     * futures, processing one PV at a time.
     */
    try {
        for (int i = 0; i < pvNames.size(); i++) {
            List<Future<EventStream>> eventStreamFutures = listOfEventStreamFuturesLists.get(i);
            String pvName = pvNames.get(i);
            PVTypeInfo typeInfo = typeInfos.get(i);
            HashMap<String, String> engineMetadata = fetchLatestMetadata ? engineMetadatas.get(i) : null;
            PostProcessor postProcessor = postProcessors.get(i);

            logger.debug("Done with the RetrievalResults; moving onto the individual event stream "
                    + "from each source for " + StringUtils.join(pvNames, ", "));
            pmansProfiler.mark("After retrieval results");
            for (Future<EventStream> future : eventStreamFutures) {
                EventStreamDesc sourceDesc = null;

                // Gets the result of a data retrieval
                try (EventStream eventStream = future.get()) {
                    sourceDesc = eventStream.getDescription();
                    if (sourceDesc == null) {
                        logger.warn("Skipping event stream without a desc for pv " + pvName);
                        continue;
                    }

                    logger.debug("Processing event stream for pv " + pvName + " from source "
                            + ((eventStream.getDescription() != null) ? eventStream.getDescription().getSource()
                                    : " unknown"));

                    try {
                        mergeTypeInfo(typeInfo, sourceDesc, engineMetadata);
                    } catch (MismatchedDBRTypeException mex) {
                        logger.error(mex.getMessage(), mex);
                        continue;
                    }

                    if (currentlyProcessingPV == null || !currentlyProcessingPV.equals(pvName)) {
                        logger.debug("Switching to new PV " + pvName + " In some mime responses we insert "
                                + "special headers at the beginning of the response. Calling the hook for "
                                + "that");
                        currentlyProcessingPV = pvName;
                        /*
                         * Goes through the PB data stream over a period of time. The relevant MIME
                         * response actually deals with the processing of the PV. `start` and `end`
                         * refer to the very beginning and very end of the time period being
                         * retrieved, regardless of whether it is divided up or not.
                         */
                        mergeDedupCountingConsumer.processingPV(currentlyProcessingPV, start, end, sourceDesc);
                    }

                    try {
                        // If the postProcessor does not have a consolidated event stream, we send each eventstream across as we encounter it.
                        // Else we send the consolidatedEventStream down below.
                        if (!(postProcessor instanceof PostProcessorWithConsolidatedEventStream)) {
                            /*
                             * The eventStream object contains all the data over the current period.
                             */
                            mergeDedupCountingConsumer.consumeEventStream(eventStream);
                            resp.flushBuffer();
                        }
                    } catch (Exception ex) {
                        if (ex != null && ex.toString() != null
                                && ex.toString().contains("ClientAbortException")) {
                            // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
                            logger.debug(
                                    "Exception when consuming and flushing data from " + sourceDesc.getSource(),
                                    ex);
                        } else {
                            logger.error("Exception when consuming and flushing data from "
                                    + sourceDesc.getSource() + "-->" + ex.toString(), ex);
                        }
                    }
                    pmansProfiler.mark("After event stream " + eventStream.getDescription().getSource());
                } catch (Exception ex) {
                    if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
                        // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
                        logger.debug("Exception when consuming and flushing data from "
                                + (sourceDesc != null ? sourceDesc.getSource() : "N/A"), ex);
                    } else {
                        logger.error("Exception when consuming and flushing data from "
                                + (sourceDesc != null ? sourceDesc.getSource() : "N/A") + "-->" + ex.toString(),
                                ex);
                    }
                }
            }

            // TODO Go through data from other appliances here

            if (postProcessor instanceof PostProcessorWithConsolidatedEventStream) {
                try (EventStream eventStream = ((PostProcessorWithConsolidatedEventStream) postProcessor)
                        .getConsolidatedEventStream()) {
                    EventStreamDesc sourceDesc = eventStream.getDescription();
                    if (sourceDesc == null) {
                        logger.error("Skipping event stream without a desc for pv " + pvName
                                + " and post processor " + postProcessor.getExtension());
                    } else {
                        mergeDedupCountingConsumer.consumeEventStream(eventStream);
                        resp.flushBuffer();
                    }
                }
            }

            // If the postProcessor needs to send final data across, give it a chance now...
            if (postProcessor instanceof AfterAllStreams) {
                EventStream finalEventStream = ((AfterAllStreams) postProcessor).anyFinalData();
                if (finalEventStream != null) {
                    mergeDedupCountingConsumer.consumeEventStream(finalEventStream);
                    resp.flushBuffer();
                }
            }

            pmansProfiler.mark("After writing all eventstreams to response");
        }
    } catch (Exception ex) {
        if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
            // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
            logger.debug("Exception when retrieving data ", ex);
        } else {
            logger.error("Exception when retrieving data " + "-->" + ex.toString(), ex);
        }
    }

    long s2 = System.currentTimeMillis();
    logger.info("For the complete request, found a total of " + mergeDedupCountingConsumer.totalEventsForAllPVs
            + " in " + (s2 - s1) + "(ms)" + " skipping " + mergeDedupCountingConsumer.skippedEventsForAllPVs
            + " events" + " deduping involved " + mergeDedupCountingConsumer.comparedEventsForAllPVs
            + " compares.");

    pmansProfiler.mark("After all closes and flushing all buffers");

    // Until we determine all the conditions under which we should log this, we log sparingly.
    if (pmansProfiler.totalTimeMS() > 5000) {
        logger.error("Retrieval time for " + StringUtils.join(pvNames, ", ") + " from " + startTimeStr + " to "
                + endTimeStr + ": " + pmansProfiler.toString());
    }

    mergeDedupCountingConsumer.close();
}
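
The heart of the retrieval above is a fan-out/fan-in over futures: one task is submitted per PV, and the resulting streams are consumed in request order. Below is a minimal, self-contained sketch of that pattern built around LinkedList.getFirst(), which peeks at the oldest pending future without removing it. The fetchPv helper and the pool size are illustrative stand-ins, not part of the archiver appliance API.

import java.util.LinkedList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class FanOutFanInSketch {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        String[] pvNames = { "pv:alpha", "pv:beta", "pv:gamma" };

        // Fan out: one future per PV, queued in request order.
        LinkedList<Future<String>> futures = new LinkedList<>();
        for (String pv : pvNames) {
            futures.add(pool.submit(() -> fetchPv(pv)));
        }

        // Fan in: getFirst() peeks at the oldest pending retrieval without
        // removing it; removeFirst() pops it once the result is consumed.
        while (!futures.isEmpty()) {
            Future<String> oldest = futures.getFirst();
            System.out.println(oldest.get()); // blocks until this PV's data is ready
            futures.removeFirst();
        }
        pool.shutdown();
    }

    // Stand-in for the real per-PV retrieval call.
    private static String fetchPv(String pv) {
        return "data for " + pv;
    }
}

A plain Deque would also do; LinkedList is used here to keep the peek (getFirst) and pop (removeFirst) steps explicit.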

From source file:com.datatorrent.stram.StreamingContainerManager.java

/**
 * Compute checkpoints required for a given operator instance to be recovered.
 * This is done by looking at the checkpoints available for downstream dependencies first,
 * and then selecting the most recent available checkpoint that is no newer than what the
 * downstream operators can recover to.
 *
 * @param operator Operator instance for which to find recovery checkpoint
 * @param ctx      Context into which to collect traversal info
 */
public void updateRecoveryCheckpoints(PTOperator operator, UpdateCheckpointsContext ctx) {
    if (operator.getRecoveryCheckpoint().windowId < ctx.committedWindowId.longValue()) {
        ctx.committedWindowId.setValue(operator.getRecoveryCheckpoint().windowId);
    }

    if (operator.getState() == PTOperator.State.ACTIVE && (ctx.currentTms
            - operator.stats.lastWindowIdChangeTms) > operator.stats.windowProcessingTimeoutMillis) {
        // if the checkpoint is ahead, then it is not blocked but waiting for activation (state-less recovery, at-most-once)
        if (ctx.committedWindowId.longValue() >= operator.getRecoveryCheckpoint().windowId) {
            LOG.debug("Marking operator {} blocked committed window {}, recovery window {}", operator,
                    Codec.getStringWindowId(ctx.committedWindowId.longValue()),
                    Codec.getStringWindowId(operator.getRecoveryCheckpoint().windowId));
            ctx.blocked.add(operator);
        }
    }

    // the most recent checkpoint eligible for recovery based on downstream state
    Checkpoint maxCheckpoint = Checkpoint.INITIAL_CHECKPOINT;

    Set<OperatorMeta> checkpointGroup = ctx.checkpointGroups.get(operator.getOperatorMeta());
    if (checkpointGroup == null) {
        checkpointGroup = Collections.singleton(operator.getOperatorMeta());
    }
    // find intersection of checkpoints that group can collectively move to
    TreeSet<Checkpoint> commonCheckpoints = new TreeSet<>(new Checkpoint.CheckpointComparator());
    synchronized (operator.checkpoints) {
        commonCheckpoints.addAll(operator.checkpoints);
    }
    Set<PTOperator> groupOpers = new HashSet<>(checkpointGroup.size());
    boolean pendingDeploy = operator.getState() == PTOperator.State.PENDING_DEPLOY;
    if (checkpointGroup.size() > 1) {
        for (OperatorMeta om : checkpointGroup) {
            Collection<PTOperator> operators = plan.getAllOperators(om);
            for (PTOperator groupOper : operators) {
                synchronized (groupOper.checkpoints) {
                    commonCheckpoints.retainAll(groupOper.checkpoints);
                }
                // visit all downstream operators of the group
                ctx.visited.add(groupOper);
                groupOpers.add(groupOper);
                pendingDeploy |= groupOper.getState() == PTOperator.State.PENDING_DEPLOY;
            }
        }
        // highest common checkpoint
        if (!commonCheckpoints.isEmpty()) {
            maxCheckpoint = commonCheckpoints.last();
        }
    } else {
        // without logical grouping, treat partitions as independent
        // this is especially important for parallel partitioning
        ctx.visited.add(operator);
        groupOpers.add(operator);
        maxCheckpoint = operator.getRecentCheckpoint();
        if (ctx.recovery && maxCheckpoint.windowId == Stateless.WINDOW_ID && operator.isOperatorStateLess()) {
            long currentWindowId = WindowGenerator.getWindowId(ctx.currentTms, this.vars.windowStartMillis,
                    this.getLogicalPlan().getValue(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS));
            maxCheckpoint = new Checkpoint(currentWindowId, 0, 0);
        }
    }

    // DFS downstream operators
    for (PTOperator groupOper : groupOpers) {
        for (PTOperator.PTOutput out : groupOper.getOutputs()) {
            for (PTOperator.PTInput sink : out.sinks) {
                PTOperator sinkOperator = sink.target;
                if (groupOpers.contains(sinkOperator)) {
                    continue; // downstream operator within group
                }
                if (!ctx.visited.contains(sinkOperator)) {
                    // downstream traversal
                    updateRecoveryCheckpoints(sinkOperator, ctx);
                }
                // recovery window id cannot move backwards
                // when dynamically adding new operators
                if (sinkOperator.getRecoveryCheckpoint().windowId >= operator
                        .getRecoveryCheckpoint().windowId) {
                    maxCheckpoint = Checkpoint.min(maxCheckpoint, sinkOperator.getRecoveryCheckpoint());
                }

                if (ctx.blocked.contains(sinkOperator)) {
                    if (sinkOperator.stats.getCurrentWindowId() == operator.stats.getCurrentWindowId()) {
                        // downstream operator is blocked by this operator
                        ctx.blocked.remove(sinkOperator);
                    }
                }
            }
        }
    }

    // find the common checkpoint that is <= downstream recovery checkpoint
    if (!commonCheckpoints.contains(maxCheckpoint)) {
        if (!commonCheckpoints.isEmpty()) {
            maxCheckpoint = Objects.firstNonNull(commonCheckpoints.floor(maxCheckpoint), maxCheckpoint);
        }
    }

    for (PTOperator groupOper : groupOpers) {
        // checkpoint frozen during deployment
        if (!pendingDeploy || ctx.recovery) {
            // remove previous checkpoints
            Checkpoint c1 = Checkpoint.INITIAL_CHECKPOINT;
            LinkedList<Checkpoint> checkpoints = groupOper.checkpoints;
            synchronized (checkpoints) {
                if (!checkpoints.isEmpty() && (checkpoints.getFirst()).windowId <= maxCheckpoint.windowId) {
                    c1 = checkpoints.getFirst();
                    Checkpoint c2;
                    while (checkpoints.size() > 1
                            && ((c2 = checkpoints.get(1)).windowId) <= maxCheckpoint.windowId) {
                        checkpoints.removeFirst();
                        //LOG.debug("Checkpoint to delete: operator={} windowId={}", operator.getName(), c1);
                        this.purgeCheckpoints.add(new Pair<PTOperator, Long>(groupOper, c1.windowId));
                        c1 = c2;
                    }
                } else {
                    if (ctx.recovery && checkpoints.isEmpty() && groupOper.isOperatorStateLess()) {
                        LOG.debug("Adding checkpoint for stateless operator {} {}", groupOper,
                                Codec.getStringWindowId(maxCheckpoint.windowId));
                        c1 = groupOper.addCheckpoint(maxCheckpoint.windowId, this.vars.windowStartMillis);
                    }
                }
            }
            //LOG.debug("Operator {} checkpoints: commit {} recent {}", new Object[] {operator.getName(), c1, operator.checkpoints});
            groupOper.setRecoveryCheckpoint(c1);
        } else {
            LOG.debug("Skipping checkpoint update {} during {}", groupOper, groupOper.getState());
        }
    }

}
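
The purge loop above boils down to trimming the head of a LinkedList kept in ascending windowId order: while the second entry is still within the recovery bound, the head can be safely dropped, so the newest checkpoint that is still <= the bound survives as the new head. Below is a minimal sketch of that pattern; Long stands in for the Checkpoint class, and the window ids and bound are illustrative.

import java.util.LinkedList;

public class CheckpointPurgeSketch {
    public static void main(String[] args) {
        // Checkpoints in ascending windowId order, oldest at the head.
        LinkedList<Long> checkpoints = new LinkedList<>();
        checkpoints.add(100L);
        checkpoints.add(200L);
        checkpoints.add(300L);
        checkpoints.add(400L);
        long maxCheckpoint = 300L; // recovery bound derived from downstream state

        if (!checkpoints.isEmpty() && checkpoints.getFirst() <= maxCheckpoint) {
            // Pop from the head while the *next* entry is still within the bound,
            // so the last eligible checkpoint (300 here) survives as the head.
            while (checkpoints.size() > 1 && checkpoints.get(1) <= maxCheckpoint) {
                System.out.println("purging " + checkpoints.removeFirst());
            }
        }
        System.out.println("recovery checkpoint: " + checkpoints.getFirst()); // 300
        // remaining list: [300, 400]
    }
}

Note that getFirst() throws NoSuchElementException on an empty list, which is why the isEmpty() guard comes first, mirroring the checkpoints.isEmpty() check in the original code.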