Example usage for java.sql Timestamp before

List of usage examples for java.sql Timestamp before

Introduction

On this page you can find example usages for java.sql Timestamp.before().

Prototype

public boolean before(Timestamp ts) 

Document

Indicates whether this Timestamp object is earlier than the given Timestamp object.
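
A minimal, self-contained sketch of the comparison (the class name and timestamp values are illustrative):

import java.sql.Timestamp;

public class TimestampBeforeExample {
    public static void main(String[] args) {
        Timestamp earlier = Timestamp.valueOf("2024-01-01 00:00:00");
        Timestamp later = Timestamp.valueOf("2024-01-02 00:00:00");

        System.out.println(earlier.before(later));   // true: earlier is strictly before later
        System.out.println(later.before(earlier));   // false
        System.out.println(earlier.before(earlier)); // false: before() is strict, so equal timestamps return false
    }
}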

Usage

From source file:jp.co.ntts.vhut.logic.PrivateCloudLogic.java

@Override
public void updateVlanResources() {
    long count = jdbcManager.from(Network.class)
            .where(new SimpleWhere().ne(network().status(), NetworkStatus.RESERVED_BY_SYSTEM)).getCount();
    List<VlanResource> toBeInsertList = new ArrayList<VlanResource>();
    List<VlanResource> toBeUpdateList = new ArrayList<VlanResource>();
    Timestamp currentDate = TimestampUtil.getCurrentDateAsTimestamp();
    Timestamp minDate = TimestampUtil.subtract(currentDate, 1, TimestampUtil.Unit.DAY);
    Timestamp maxDate = TimestampUtil.add(cloudConfig.getReservationEndTimeMax(), 1, TimestampUtil.Unit.DAY);
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd");
    Map<String, VlanResource> resourceMap = new HashMap<String, VlanResource>();
    // Load the existing VLAN resources for this cloud within the target date range
    List<VlanResource> oldResourceList = jdbcManager.from(VlanResource.class)
            .where(new SimpleWhere().eq(vlanResource().cloudId(), cloudId).ge(vlanResource().time(), minDate)
                    .le(vlanResource().time(), maxDate))
            .getResultList();
    for (VlanResource oldResource : oldResourceList) {
        String key = dateFormat.format(oldResource.time);
        resourceMap.put(key, oldResource);
    }
    // Walk day by day from the current date to maxDate, creating or updating a resource for each day
    Timestamp targetDate = currentDate;
    //        maxDate = TimestampUtil.add(maxDate, 1, TimestampUtil.Unit.DAY);
    while (targetDate.before(maxDate)) {
        String key = dateFormat.format(targetDate);
        VlanResource oldResource = resourceMap.get(key);
        if (oldResource == null) {
            // No resource exists for this date yet, so create one
            VlanResource newResource = new VlanResource();
            newResource.id = targetDate.getTime();
            newResource.vlanMax = (int) count;
            newResource.vlanTerminablyUsed = 0;
            newResource.cloudId = cloudId;
            newResource.time = targetDate;
            toBeInsertList.add(newResource);
        } else {
            // A resource already exists for this date, so refresh its maximum
            oldResource.vlanMax = (int) count;
            toBeUpdateList.add(oldResource);
        }
        targetDate = TimestampUtil.add(targetDate, 1, TimestampUtil.Unit.DAY);
    }
    // Insert the newly created resources in a batch
    if (toBeInsertList.size() > 0) {
        jdbcManager.insertBatch(toBeInsertList).execute();
    }
    // Update the existing resources in a batch
    if (toBeUpdateList.size() > 0) {
        jdbcManager.updateBatch(toBeUpdateList).execute();
    }
}

From source file:org.kuali.kra.protocol.actions.ActionHelperBase.java

/**
 * Prepares all protocol actions for being filtered by setting their isInFilterView attribute.
 */
public void initFilterDatesView() {
    java.util.Date dayBeforeStartDate = null;
    java.util.Date dayAfterEndDate = null;

    if (filteredHistoryStartDate != null && filteredHistoryEndDate != null) {
        dayBeforeStartDate = DateUtils.addDays(filteredHistoryStartDate, -1);
        dayAfterEndDate = DateUtils.addDays(filteredHistoryEndDate, 1);
    }

    for (ProtocolActionBase protocolAction : getSortedProtocolActions()) {
        Timestamp actionDate = protocolAction.getActionDate();
        if (dayBeforeStartDate != null && dayAfterEndDate != null) {
            protocolAction.setIsInFilterView(
                    actionDate.after(dayBeforeStartDate) && actionDate.before(dayAfterEndDate));
        } else {
            protocolAction.setIsInFilterView(true);
        }
        if (protocolAction.getIsInFilterView()) {

        }
    }
}

From source file:org.wso2.carbon.user.core.jdbc.JDBCUserStoreManager.java

public boolean doAuthenticate(String userName, Object credential) throws UserStoreException {

    if (!checkUserNameValid(userName)) {
        return false;
    }

    if (!checkUserPasswordValid(credential)) {
        return false;
    }

    if (UserCoreUtil.isRegistryAnnonymousUser(userName)) {
        log.error("Anonnymous user trying to login");
        return false;
    }

    Connection dbConnection = null;
    ResultSet rs = null;
    PreparedStatement prepStmt = null;
    String sqlstmt = null;
    String password = (String) credential;
    boolean isAuthed = false;

    try {
        dbConnection = getDBConnection();
        dbConnection.setAutoCommit(false);

        if (isCaseSensitiveUsername()) {
            sqlstmt = realmConfig.getUserStoreProperty(JDBCRealmConstants.SELECT_USER);
        } else {
            sqlstmt = realmConfig.getUserStoreProperty(JDBCRealmConstants.SELECT_USER_CASE_INSENSITIVE);
        }

        if (log.isDebugEnabled()) {
            log.debug(sqlstmt);
        }

        prepStmt = dbConnection.prepareStatement(sqlstmt);
        prepStmt.setString(1, userName);
        if (sqlstmt.contains(UserCoreConstants.UM_TENANT_COLUMN)) {
            prepStmt.setInt(2, tenantId);
        }

        rs = prepStmt.executeQuery();

        if (rs.next()) {
            String storedPassword = rs.getString(3);
            String saltValue = null;
            if ("true".equalsIgnoreCase(
                    realmConfig.getUserStoreProperty(JDBCRealmConstants.STORE_SALTED_PASSWORDS))) {
                saltValue = rs.getString(4);
            }

            boolean requireChange = rs.getBoolean(5);
            Timestamp changedTime = rs.getTimestamp(6);

            GregorianCalendar gc = new GregorianCalendar();
            gc.add(GregorianCalendar.HOUR, -24);
            Date date = gc.getTime();

            if (requireChange && changedTime.before(date)) {
                isAuthed = false;
            } else {
                password = this.preparePassword(password, saltValue);
                if ((storedPassword != null) && (storedPassword.equals(password))) {
                    isAuthed = true;
                }
            }
        }
    } catch (SQLException e) {
        String msg = "Error occurred while retrieving user authentication info for user : " + userName;
        if (log.isDebugEnabled()) {
            log.debug(msg, e);
        }
        throw new UserStoreException("Authentication Failure", e);
    } finally {
        DatabaseUtil.closeAllConnections(dbConnection, rs, prepStmt);
    }

    if (log.isDebugEnabled()) {
        log.debug("User " + userName + " login attempt. Login success :: " + isAuthed);
    }

    return isAuthed;
}

From source file:org.etudes.component.app.melete.ModuleDB.java

private int getStudentNavSeqNo(String userId, String courseId, int currSeqNo, boolean prevFlag) {
    Connection dbConnection = null;
    List resList = new ArrayList();
    java.sql.Timestamp currentTimestamp = null;
    int navSeqNo = -1;
    String sql;

    try {
        dbConnection = SqlService.borrowConnection();
        ResultSet rs, accRs = null;
        //First get all sequence numbers after this one from course module table
        if (prevFlag) {
            sql = "select cm.seq_no from melete_course_module cm,melete_module_shdates msh where cm.course_id = ? and cm.delete_flag = 0 and cm.archv_flag = 0 and cm.seq_no < ? and cm.module_id = msh.module_id and ((msh.start_date is null or msh.start_date < ?) and (msh.end_date is null or msh.end_date > ?)) order by cm.seq_no desc";
        } else {
            sql = "select cm.seq_no from melete_course_module cm,melete_module_shdates msh where cm.course_id = ? and cm.delete_flag = 0 and cm.archv_flag = 0 and cm.seq_no > ? and cm.module_id = msh.module_id and ((msh.start_date is null or msh.start_date < ?) and (msh.end_date is null or msh.end_date > ?)) order by cm.seq_no";
        }
        PreparedStatement pstmt = dbConnection.prepareStatement(sql);
        pstmt.setString(1, courseId);
        pstmt.setInt(2, currSeqNo);
        currentTimestamp = new java.sql.Timestamp(Calendar.getInstance().getTimeInMillis());
        pstmt.setTimestamp(3, currentTimestamp);
        pstmt.setTimestamp(4, currentTimestamp);
        rs = pstmt.executeQuery();
        if (rs != null) {
            //Add them to resList
            while (rs.next()) {
                resList.add(rs.getInt("seq_no"));
            }
        }
        //Get all access entries for user   
        if (prevFlag) {
            sql = "select cm.seq_no, sa.start_date, sa.end_date from melete_course_module cm,melete_special_access sa where cm.course_id = ? and cm.delete_flag = 0 and cm.archv_flag = 0 and cm.seq_no < ? and cm.module_id = sa.module_id and sa.users like ? order by cm.seq_no desc";
        } else {
            sql = "select cm.seq_no, sa.start_date, sa.end_date from melete_course_module cm,melete_special_access sa where cm.course_id = ? and cm.delete_flag = 0 and cm.archv_flag = 0 and cm.seq_no > ? and cm.module_id = sa.module_id and sa.users like ? order by cm.seq_no";
        }
        PreparedStatement accPstmt = dbConnection.prepareStatement(sql);
        accPstmt.setString(1, courseId);
        accPstmt.setInt(2, currSeqNo);
        accPstmt.setString(3, "%" + userId + "%");
        accRs = accPstmt.executeQuery();
        Map accMap = new HashMap();
        if (accRs != null) {
            //Add them to accMap  
            while (accRs.next()) {
                AccessDates ad = new AccessDates(accRs.getTimestamp("start_date"),
                        accRs.getTimestamp("end_date"));
                accMap.put(accRs.getInt("seq_no"), ad);
            }
        }
        accRs.close();
        accPstmt.close();
        //If there are no access entries, return the first entry in resList
        if ((accMap == null) || (accMap.size() == 0)) {
            if (resList.size() == 0)
                navSeqNo = -1;
            else
                navSeqNo = ((Integer) resList.get(0)).intValue();
        } else {
            List removeList = new ArrayList();
            Iterator it = accMap.entrySet().iterator();
            //Check to see if there are any blocked entries in accMap. If so, add them to removeList
            while (it.hasNext()) {
                Map.Entry pairs = (Map.Entry) it.next();
                Integer seq = (Integer) pairs.getKey();
                AccessDates ad = (AccessDates) pairs.getValue();
                currentTimestamp = new java.sql.Timestamp(Calendar.getInstance().getTimeInMillis());
                java.sql.Timestamp startTimestamp = ad.getAccStartTimestamp();
                java.sql.Timestamp endTimestamp = ad.getAccEndTimestamp();
                if (((startTimestamp == null) || (startTimestamp.before(currentTimestamp)))
                        && ((endTimestamp == null) || (endTimestamp.after(currentTimestamp)))) {
                    continue;
                } else {
                    removeList.add(seq);
                }
            }
            //If there are blocked entries, remove them from both resList and accMap
            if (removeList.size() > 0) {
                for (Iterator itr = removeList.listIterator(); itr.hasNext();) {
                    Integer seq = (Integer) itr.next();
                    if (resList.size() > 0) {
                        if (resList.indexOf(seq) != -1)
                            resList.remove(seq);
                    }
                    accMap.remove(seq);
                }
            }
            //Return sequence number appropriately
            if ((resList.size() == 0) && (accMap.size() == 0)) {
                navSeqNo = -1;
            }
            if ((resList.size() == 0) && (accMap.size() > 0))
                navSeqNo = ((Integer) ((Map.Entry) accMap.entrySet().iterator().next()).getKey()).intValue();
            if ((resList.size() > 0) && (accMap.size() == 0))
                navSeqNo = ((Integer) resList.get(0)).intValue();
            if ((resList.size() > 0) && (accMap.size() > 0))
                navSeqNo = Math.max(
                        ((Integer) ((Map.Entry) accMap.entrySet().iterator().next()).getKey()).intValue(),
                        ((Integer) resList.get(0)).intValue());
        }
        rs.close();
        pstmt.close();
    } catch (Exception e) {
        if (logger.isErrorEnabled())
            logger.error(e.toString());
    } finally {
        try {
            if (dbConnection != null)
                SqlService.returnConnection(dbConnection);
        } catch (Exception e1) {
            if (logger.isErrorEnabled())
                logger.error(e1.toString());
        }
    }
    return navSeqNo;
}

From source file:org.ofbiz.accounting.invoice.InvoiceServices.java

public static Map<String, Object> checkInvoicePaymentApplications(DispatchContext ctx,
        Map<String, Object> context) {
    Delegator delegator = ctx.getDelegator();
    LocalDispatcher dispatcher = ctx.getDispatcher();
    GenericValue userLogin = (GenericValue) context.get("userLogin");
    Locale locale = (Locale) context.get("locale");

    if (DECIMALS == -1 || ROUNDING == -1) {
        return ServiceUtil.returnError(
                UtilProperties.getMessage(resource, "AccountingAritmeticPropertiesNotConfigured", locale));
    }

    String invoiceId = (String) context.get("invoiceId");
    GenericValue invoice = null;
    try {
        invoice = EntityQuery.use(delegator).from("Invoice").where("invoiceId", invoiceId).queryOne();
    } catch (GenericEntityException e) {
        Debug.logError(e, "Problem getting Invoice for Invoice ID" + invoiceId, module);
        return ServiceUtil.returnError(UtilProperties.getMessage(resource, "AccountingInvoiceNotFound",
                UtilMisc.toMap("invoiceId", invoiceId), locale));
    }

    // Ignore invoices that aren't ready yet
    if (!invoice.getString("statusId").equals("INVOICE_READY")) {
        return ServiceUtil.returnSuccess();
    }

    // Get the payment applications that can be used to pay the invoice
    List<GenericValue> paymentAppl = null;
    try {
        paymentAppl = EntityQuery.use(delegator).from("PaymentAndApplication").where("invoiceId", invoiceId)
                .queryList();
        // For each payment application, select only those that are RECEIVED or SENT based on whether the payment is a RECEIPT or DISBURSEMENT respectively
        for (Iterator<GenericValue> iter = paymentAppl.iterator(); iter.hasNext();) {
            GenericValue payment = iter.next();
            if ("PMNT_RECEIVED".equals(payment.get("statusId")) && UtilAccounting.isReceipt(payment)) {
                continue; // keep
            }
            if ("PMNT_SENT".equals(payment.get("statusId")) && UtilAccounting.isDisbursement(payment)) {
                continue; // keep
            }
            // all other cases, remove the payment application
            iter.remove();
        }
    } catch (GenericEntityException e) {
        Debug.logError(e, "Problem getting PaymentApplication(s) for Invoice ID " + invoiceId, module);
        return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                "AccountingProblemGettingPaymentApplication", UtilMisc.toMap("invoiceId", invoiceId), locale));
    }

    Map<String, BigDecimal> payments = FastMap.newInstance();
    Timestamp paidDate = null;
    for (GenericValue payAppl : paymentAppl) {
        payments.put(payAppl.getString("paymentId"), payAppl.getBigDecimal("amountApplied"));

        // paidDate will be the last date (chronologically) of all the Payments applied to this invoice
        Timestamp paymentDate = payAppl.getTimestamp("effectiveDate");
        if (paymentDate != null) {
            if ((paidDate == null) || (paidDate.before(paymentDate))) {
                paidDate = paymentDate;
            }
        }
    }

    BigDecimal totalPayments = ZERO;
    for (BigDecimal amount : payments.values()) {
        if (amount == null)
            amount = ZERO;
        totalPayments = totalPayments.add(amount).setScale(DECIMALS, ROUNDING);
    }

    if (totalPayments.signum() == 1) {
        BigDecimal invoiceTotal = InvoiceWorker.getInvoiceTotal(delegator, invoiceId);
        if (Debug.verboseOn()) {
            Debug.logVerbose("Invoice #" + invoiceId + " total: " + invoiceTotal, module);
            Debug.logVerbose("Total payments : " + totalPayments, module);
        }
        if (totalPayments.compareTo(invoiceTotal) >= 0) { // this checks that totalPayments is greater than or equal to invoiceTotal
            // this invoice is paid
            Map<String, Object> svcCtx = UtilMisc.toMap("statusId", "INVOICE_PAID", "invoiceId", invoiceId,
                    "paidDate", paidDate, "userLogin", userLogin);
            try {
                dispatcher.runSync("setInvoiceStatus", svcCtx);
            } catch (GenericServiceException e) {
                Debug.logError(e, "Problem changing invoice status to INVOICE_PAID" + svcCtx, module);
                return ServiceUtil.returnError(
                        UtilProperties.getMessage(resource, "AccountingProblemChangingInvoiceStatusTo",
                                UtilMisc.toMap("newStatus", "INVOICE_PAID"), locale));
            }
        }
    } else {
        Debug.logInfo("No payments found for Invoice #" + invoiceId, module);
    }

    return ServiceUtil.returnSuccess();
}

From source file:org.kuali.kfs.module.tem.document.service.impl.TravelDocumentServiceImpl.java

@Override
public List<String> findMatchingTrips(TravelDocument travelDocument) {

    String travelDocumentIdentifier = travelDocument.getTravelDocumentIdentifier();
    Integer temProfileId = travelDocument.getTemProfileId();
    Timestamp earliestTripBeginDate = null;
    Timestamp greatestTripEndDate = null;

    List<TravelReimbursementDocument> documents = findReimbursementDocuments(travelDocumentIdentifier);
    for (TravelReimbursementDocument document : documents) {
        Timestamp tripBegin = document.getTripBegin();
        Timestamp tripEnd = document.getTripEnd();
        if (ObjectUtils.isNull(earliestTripBeginDate) && ObjectUtils.isNull(greatestTripEndDate)) {
            earliestTripBeginDate = tripBegin;
            greatestTripEndDate = tripEnd;
        } else {
            earliestTripBeginDate = tripBegin.before(earliestTripBeginDate) ? tripBegin : earliestTripBeginDate;
            greatestTripEndDate = tripEnd.after(greatestTripEndDate) ? tripEnd : greatestTripEndDate;

        }
    }

    // TR with no TAs created from mainmenu
    if (documents.isEmpty() && ObjectUtils.isNotNull(travelDocument.getTripBegin())
            && ObjectUtils.isNotNull(travelDocument.getTripEnd())) {
        earliestTripBeginDate = getTripBeginDate(travelDocument.getTripBegin());
        greatestTripEndDate = getTripEndDate(travelDocument.getTripEnd());
    }

    List<TravelReimbursementDocument> matchDocs = (List<TravelReimbursementDocument>) travelDocumentDao
            .findMatchingTrips(temProfileId, earliestTripBeginDate, greatestTripEndDate);
    List<String> documentIds = new ArrayList<String>();
    for (TravelReimbursementDocument document : matchDocs) {
        if (!travelDocument.getDocumentNumber().equals(document.getDocumentNumber())) {
            documentIds.add(document.getDocumentNumber());
        }
    }
    return documentIds;
}

From source file:org.apache.ofbiz.accounting.invoice.InvoiceServices.java

public static Map<String, Object> checkInvoicePaymentApplications(DispatchContext ctx,
        Map<String, Object> context) {
    Delegator delegator = ctx.getDelegator();
    LocalDispatcher dispatcher = ctx.getDispatcher();
    GenericValue userLogin = (GenericValue) context.get("userLogin");
    Locale locale = (Locale) context.get("locale");

    if (DECIMALS == -1 || ROUNDING == -1) {
        return ServiceUtil.returnError(
                UtilProperties.getMessage(resource, "AccountingAritmeticPropertiesNotConfigured", locale));
    }

    String invoiceId = (String) context.get("invoiceId");
    GenericValue invoice = null;
    try {
        invoice = EntityQuery.use(delegator).from("Invoice").where("invoiceId", invoiceId).queryOne();
    } catch (GenericEntityException e) {
        Debug.logError(e, "Problem getting Invoice for Invoice ID" + invoiceId, module);
        return ServiceUtil.returnError(UtilProperties.getMessage(resource, "AccountingInvoiceNotFound",
                UtilMisc.toMap("invoiceId", invoiceId), locale));
    }

    // Ignore invoices that aren't ready yet
    if (!invoice.getString("statusId").equals("INVOICE_READY")) {
        return ServiceUtil.returnSuccess();
    }

    // Get the payment applications that can be used to pay the invoice
    List<GenericValue> paymentAppl = null;
    try {
        paymentAppl = EntityQuery.use(delegator).from("PaymentAndApplication").where("invoiceId", invoiceId)
                .queryList();
        // For each payment application, select only those that are RECEIVED or SENT based on whether the payment is a RECEIPT or DISBURSEMENT respectively
        for (Iterator<GenericValue> iter = paymentAppl.iterator(); iter.hasNext();) {
            GenericValue payment = iter.next();
            if ("PMNT_RECEIVED".equals(payment.get("statusId")) && UtilAccounting.isReceipt(payment)) {
                continue; // keep
            }
            if ("PMNT_SENT".equals(payment.get("statusId")) && UtilAccounting.isDisbursement(payment)) {
                continue; // keep
            }
            // all other cases, remove the payment application
            iter.remove();
        }
    } catch (GenericEntityException e) {
        Debug.logError(e, "Problem getting PaymentApplication(s) for Invoice ID " + invoiceId, module);
        return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                "AccountingProblemGettingPaymentApplication", UtilMisc.toMap("invoiceId", invoiceId), locale));
    }

    Map<String, BigDecimal> payments = new HashMap<String, BigDecimal>();
    Timestamp paidDate = null;
    for (GenericValue payAppl : paymentAppl) {
        payments.put(payAppl.getString("paymentId"), payAppl.getBigDecimal("amountApplied"));

        // paidDate will be the last date (chronologically) of all the Payments applied to this invoice
        Timestamp paymentDate = payAppl.getTimestamp("effectiveDate");
        if (paymentDate != null) {
            if ((paidDate == null) || (paidDate.before(paymentDate))) {
                paidDate = paymentDate;
            }
        }
    }

    BigDecimal totalPayments = ZERO;
    for (BigDecimal amount : payments.values()) {
        if (amount == null)
            amount = ZERO;
        totalPayments = totalPayments.add(amount).setScale(DECIMALS, ROUNDING);
    }

    if (totalPayments.signum() == 1) {
        BigDecimal invoiceTotal = InvoiceWorker.getInvoiceTotal(delegator, invoiceId);
        if (Debug.verboseOn()) {
            Debug.logVerbose("Invoice #" + invoiceId + " total: " + invoiceTotal, module);
            Debug.logVerbose("Total payments : " + totalPayments, module);
        }
        if (totalPayments.compareTo(invoiceTotal) >= 0) { // this checks that totalPayments is greater than or equal to invoiceTotal
            // this invoice is paid
            Map<String, Object> svcCtx = UtilMisc.toMap("statusId", "INVOICE_PAID", "invoiceId", invoiceId,
                    "paidDate", paidDate, "userLogin", userLogin);
            try {
                dispatcher.runSync("setInvoiceStatus", svcCtx);
            } catch (GenericServiceException e) {
                Debug.logError(e, "Problem changing invoice status to INVOICE_PAID" + svcCtx, module);
                return ServiceUtil.returnError(
                        UtilProperties.getMessage(resource, "AccountingProblemChangingInvoiceStatusTo",
                                UtilMisc.toMap("newStatus", "INVOICE_PAID"), locale));
            }
        }
    } else {
        Debug.logInfo("No payments found for Invoice #" + invoiceId, module);
    }

    return ServiceUtil.returnSuccess();
}

From source file:org.epics.archiverappliance.retrieval.DataRetrievalServlet.java

private void doGetSinglePV(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {

    PoorMansProfiler pmansProfiler = new PoorMansProfiler();
    String pvName = req.getParameter("pv");

    if (configService.getStartupState() != STARTUP_SEQUENCE.STARTUP_COMPLETE) {
        String msg = "Cannot process data retrieval requests for PV " + pvName
                + " until the appliance has completely started up.";
        logger.error(msg);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, msg);
        return;
    }

    String startTimeStr = req.getParameter("from");
    String endTimeStr = req.getParameter("to");
    boolean useReduced = false;
    String useReducedStr = req.getParameter("usereduced");
    if (useReducedStr != null && !useReducedStr.equals("")) {
        try {
            useReduced = Boolean.parseBoolean(useReducedStr);
        } catch (Exception ex) {
            logger.error("Exception parsing usereduced", ex);
            useReduced = false;
        }
    }
    String extension = req.getPathInfo().split("\\.")[1];
    logger.info("Mime is " + extension);

    boolean useChunkedEncoding = true;
    String doNotChunkStr = req.getParameter("donotchunk");
    if (doNotChunkStr != null && !doNotChunkStr.equals("false")) {
        logger.info("Turning off HTTP chunked encoding");
        useChunkedEncoding = false;
    }

    boolean fetchLatestMetadata = false;
    String fetchLatestMetadataStr = req.getParameter("fetchLatestMetadata");
    if (fetchLatestMetadataStr != null && fetchLatestMetadataStr.equals("true")) {
        logger.info("Adding a call to the engine to fetch the latest metadata");
        fetchLatestMetadata = true;
    }

    // For data retrieval we need a PV info. However, in case of PVs that have long since retired, we may not want to have PVTypeInfos in the system.
    // So, we support a template PV that lays out the data sources.
    // During retrieval, you can pass in the PV as a template and we'll clone this and make a temporary copy.
    String retiredPVTemplate = req.getParameter("retiredPVTemplate");

    if (pvName == null) {
        String msg = "PV name is null.";
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
        return;
    }

    if (pvName.equals(ARCH_APPL_PING_PV)) {
        logger.debug("Processing ping PV - this is used to validate the connection with the client.");
        processPingPV(req, resp);
        return;
    }

    if (pvName.endsWith(".VAL")) {
        int len = pvName.length();
        pvName = pvName.substring(0, len - 4);
        logger.info("Removing .VAL from pvName for request giving " + pvName);
    }

    // ISO datetimes are of the form "2011-02-02T08:00:00.000Z"
    Timestamp end = TimeUtils.plusHours(TimeUtils.now(), 1);
    if (endTimeStr != null) {
        try {
            end = TimeUtils.convertFromISO8601String(endTimeStr);
        } catch (IllegalArgumentException ex) {
            try {
                end = TimeUtils.convertFromDateTimeStringWithOffset(endTimeStr);
            } catch (IllegalArgumentException ex2) {
                String msg = "Cannot parse time" + endTimeStr;
                logger.warn(msg, ex2);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
        }
    }

    // We get one day by default
    Timestamp start = TimeUtils.minusDays(end, 1);
    if (startTimeStr != null) {
        try {
            start = TimeUtils.convertFromISO8601String(startTimeStr);
        } catch (IllegalArgumentException ex) {
            try {
                start = TimeUtils.convertFromDateTimeStringWithOffset(startTimeStr);
            } catch (IllegalArgumentException ex2) {
                String msg = "Cannot parse time " + startTimeStr;
                logger.warn(msg, ex2);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
        }
    }

    if (end.before(start)) {
        String msg = "For request, end " + end.toString() + " is before start " + start.toString() + " for pv "
                + pvName;
        logger.error(msg);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
        return;
    }

    LinkedList<TimeSpan> requestTimes = new LinkedList<TimeSpan>();

    // We can specify a list of time stamp pairs using the optional timeranges parameter
    String timeRangesStr = req.getParameter("timeranges");
    if (timeRangesStr != null) {
        boolean continueWithRequest = parseTimeRanges(resp, pvName, requestTimes, timeRangesStr);
        if (!continueWithRequest) {
            // Cannot parse the time ranges properly, so we abort the request.
            return;
        }

        // Override the start and the end so that the mergededup consumer works correctly.
        start = requestTimes.getFirst().getStartTime();
        end = requestTimes.getLast().getEndTime();

    } else {
        requestTimes.add(new TimeSpan(start, end));
    }

    assert (requestTimes.size() > 0);

    String postProcessorUserArg = req.getParameter("pp");
    if (pvName.contains("(")) {
        if (!pvName.contains(")")) {
            logger.error("Unbalanced paran " + pvName);
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST);
            return;
        }
        String[] components = pvName.split("[(,)]");
        postProcessorUserArg = components[0];
        pvName = components[1];
        if (components.length > 2) {
            for (int i = 2; i < components.length; i++) {
                postProcessorUserArg = postProcessorUserArg + "_" + components[i];
            }
        }
        logger.info("After parsing the function call syntax pvName is " + pvName
                + " and postProcessorUserArg is " + postProcessorUserArg);
    }

    PostProcessor postProcessor = PostProcessors.findPostProcessor(postProcessorUserArg);

    PVTypeInfo typeInfo = PVNames.determineAppropriatePVTypeInfo(pvName, configService);
    pmansProfiler.mark("After PVTypeInfo");

    if (typeInfo == null && RetrievalState.includeExternalServers(req)) {
        logger.debug("Checking to see if pv " + pvName + " is served by a external Archiver Server");
        typeInfo = checkIfPVisServedByExternalServer(pvName, start, req, resp, useChunkedEncoding);
    }

    if (typeInfo == null) {
        if (resp.isCommitted()) {
            logger.debug("Proxied the data thru an external server for PV " + pvName);
            return;
        }
    }

    if (typeInfo == null) {
        if (retiredPVTemplate != null) {
            PVTypeInfo templateTypeInfo = PVNames.determineAppropriatePVTypeInfo(retiredPVTemplate,
                    configService);
            if (templateTypeInfo != null) {
                typeInfo = new PVTypeInfo(pvName, templateTypeInfo);
                typeInfo.setPaused(true);
                typeInfo.setApplianceIdentity(configService.getMyApplianceInfo().getIdentity());
                // Somehow tell the code downstream that this is a fake typeInfo.
                typeInfo.setSamplingMethod(SamplingMethod.DONT_ARCHIVE);
                logger.debug("Using a template PV for " + pvName + " Need to determine the actual DBR type.");
                setActualDBRTypeFromData(pvName, typeInfo, configService);
            }
        }
    }

    if (typeInfo == null) {
        logger.error("Unable to find typeinfo for pv " + pvName);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_NOT_FOUND);
        return;
    }

    if (postProcessor == null) {
        if (useReduced) {
            String defaultPPClassName = configService.getInstallationProperties().getProperty(
                    "org.epics.archiverappliance.retrieval.DefaultUseReducedPostProcessor",
                    FirstSamplePP.class.getName());
            logger.debug("Using the default usereduced preprocessor " + defaultPPClassName);
            try {
                postProcessor = (PostProcessor) Class.forName(defaultPPClassName).newInstance();
            } catch (Exception ex) {
                logger.error("Exception constructing new instance of post processor " + defaultPPClassName, ex);
                postProcessor = null;
            }
        }
    }

    if (postProcessor == null) {
        logger.debug("Using the default raw preprocessor");
        postProcessor = new DefaultRawPostProcessor();
    }

    ApplianceInfo applianceForPV = configService.getApplianceForPV(pvName);
    if (applianceForPV == null) {
        // TypeInfo cannot be null here...
        assert (typeInfo != null);
        applianceForPV = configService.getAppliance(typeInfo.getApplianceIdentity());
    }

    if (!applianceForPV.equals(configService.getMyApplianceInfo())) {
        // Data for pv is elsewhere. Proxy/redirect and return.
        proxyRetrievalRequest(req, resp, pvName, useChunkedEncoding,
                applianceForPV.getRetrievalURL() + "/../data");
        return;
    }

    pmansProfiler.mark("After Appliance Info");

    String pvNameFromRequest = pvName;

    String fieldName = PVNames.getFieldName(pvName);
    if (fieldName != null && !fieldName.equals("") && !pvName.equals(typeInfo.getPvName())) {
        logger.debug("We reset the pvName " + pvName + " to one from the typeinfo " + typeInfo.getPvName()
                + " as that determines the name of the stream. Also using ExtraFieldsPostProcessor");
        pvName = typeInfo.getPvName();
        postProcessor = new ExtraFieldsPostProcessor(fieldName);
    }

    try {
        // Postprocessors get their mandatory arguments from the request.
        // If user does not pass in the expected request, throw an exception.
        postProcessor.initialize(postProcessorUserArg, pvName);
    } catch (Exception ex) {
        logger.error("Postprocessor threw an exception during initialization for " + pvName, ex);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_NOT_FOUND);
        return;
    }

    try (BasicContext retrievalContext = new BasicContext(typeInfo.getDBRType(), pvNameFromRequest);
            MergeDedupConsumer mergeDedupCountingConsumer = createMergeDedupConsumer(resp, extension,
                    useChunkedEncoding);
            RetrievalExecutorResult executorResult = determineExecutorForPostProcessing(pvName, typeInfo,
                    requestTimes, req, postProcessor)) {
        HashMap<String, String> engineMetadata = null;
        if (fetchLatestMetadata) {
            // Make a call to the engine to fetch the latest metadata.
            engineMetadata = fetchLatestMedataFromEngine(pvName, applianceForPV);
        }

        LinkedList<Future<RetrievalResult>> retrievalResultFutures = resolveAllDataSources(pvName, typeInfo,
                postProcessor, applianceForPV, retrievalContext, executorResult, req, resp);
        pmansProfiler.mark("After data source resolution");

        long s1 = System.currentTimeMillis();
        String currentlyProcessingPV = null;

        List<Future<EventStream>> eventStreamFutures = getEventStreamFuturesFromRetrievalResults(executorResult,
                retrievalResultFutures);

        logger.debug(
                "Done with the RetrievalResult's; moving onto the individual event stream from each source for "
                        + pvName);
        pmansProfiler.mark("After retrieval results");

        for (Future<EventStream> future : eventStreamFutures) {
            EventStreamDesc sourceDesc = null;
            try (EventStream eventStream = future.get()) {
                sourceDesc = null; // Reset it for each loop iteration.
                sourceDesc = eventStream.getDescription();
                if (sourceDesc == null) {
                    logger.warn("Skipping event stream without a desc for pv " + pvName);
                    continue;
                }

                logger.debug("Processing event stream for pv " + pvName + " from source "
                        + ((eventStream.getDescription() != null) ? eventStream.getDescription().getSource()
                                : " unknown"));

                try {
                    mergeTypeInfo(typeInfo, sourceDesc, engineMetadata);
                } catch (MismatchedDBRTypeException mex) {
                    logger.error(mex.getMessage(), mex);
                    continue;
                }

                if (currentlyProcessingPV == null || !currentlyProcessingPV.equals(pvName)) {
                    logger.debug("Switching to new PV " + pvName
                            + " In some mime responses we insert special headers at the beginning of the response. Calling the hook for that");
                    currentlyProcessingPV = pvName;
                    mergeDedupCountingConsumer.processingPV(currentlyProcessingPV, start, end,
                            (eventStream != null) ? sourceDesc : null);
                }

                try {
                    // If the postProcessor does not have a consolidated event stream, we send each eventstream across as we encounter it.
                    // Else we send the consolidatedEventStream down below.
                    if (!(postProcessor instanceof PostProcessorWithConsolidatedEventStream)) {
                        mergeDedupCountingConsumer.consumeEventStream(eventStream);
                        resp.flushBuffer();
                    }
                } catch (Exception ex) {
                    if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
                        // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
                        logger.debug(
                                "Exception when consuming and flushing data from " + sourceDesc.getSource(),
                                ex);
                    } else {
                        logger.error("Exception when consuming and flushing data from " + sourceDesc.getSource()
                                + "-->" + ex.toString(), ex);
                    }
                }
                pmansProfiler.mark("After event stream " + eventStream.getDescription().getSource());
            } catch (Exception ex) {
                if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
                    // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
                    logger.debug("Exception when consuming and flushing data from "
                            + (sourceDesc != null ? sourceDesc.getSource() : "N/A"), ex);
                } else {
                    logger.error("Exception when consuming and flushing data from "
                            + (sourceDesc != null ? sourceDesc.getSource() : "N/A") + "-->" + ex.toString(),
                            ex);
                }
            }
        }

        if (postProcessor instanceof PostProcessorWithConsolidatedEventStream) {
            try (EventStream eventStream = ((PostProcessorWithConsolidatedEventStream) postProcessor)
                    .getConsolidatedEventStream()) {
                EventStreamDesc sourceDesc = eventStream.getDescription();
                if (sourceDesc == null) {
                    logger.error("Skipping event stream without a desc for pv " + pvName
                            + " and post processor " + postProcessor.getExtension());
                } else {
                    mergeDedupCountingConsumer.consumeEventStream(eventStream);
                    resp.flushBuffer();
                }
            }
        }

        // If the postProcessor needs to send final data across, give it a chance now...
        if (postProcessor instanceof AfterAllStreams) {
            EventStream finalEventStream = ((AfterAllStreams) postProcessor).anyFinalData();
            if (finalEventStream != null) {
                mergeDedupCountingConsumer.consumeEventStream(finalEventStream);
                resp.flushBuffer();
            }
        }

        pmansProfiler.mark("After writing all eventstreams to response");

        long s2 = System.currentTimeMillis();
        logger.info("For the complete request, found a total of "
                + mergeDedupCountingConsumer.totalEventsForAllPVs + " in " + (s2 - s1) + "(ms)" + " skipping "
                + mergeDedupCountingConsumer.skippedEventsForAllPVs + " events" + " deduping involved "
                + mergeDedupCountingConsumer.comparedEventsForAllPVs + " compares.");
    } catch (Exception ex) {
        if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
            // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
            logger.debug("Exception when retrieving data ", ex);
        } else {
            logger.error("Exception when retrieving data " + "-->" + ex.toString(), ex);
        }
    }
    pmansProfiler.mark("After all closes and flushing all buffers");

    // Until we determine all the conditions under which we log this, we log sparingly.
    if (pmansProfiler.totalTimeMS() > 5000) {
        logger.error("Retrieval time for " + pvName + " from " + startTimeStr + " to " + endTimeStr
                + pmansProfiler.toString());
    }
}

From source file:org.epics.archiverappliance.retrieval.DataRetrievalServlet.java

private void doGetMultiPV(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {

    PoorMansProfiler pmansProfiler = new PoorMansProfiler();

    // Gets the list of PVs specified by the `pv` parameter
    // String arrays might be inefficient for retrieval. In any case, they are sorted, which is essential later on.
    List<String> pvNames = Arrays.asList(req.getParameterValues("pv"));

    // Ensuring that the AA has finished starting up before requests are accepted.
    if (configService.getStartupState() != STARTUP_SEQUENCE.STARTUP_COMPLETE) {
        String msg = "Cannot process data retrieval requests for specified PVs ("
                + StringUtils.join(pvNames, ", ") + ") until the appliance has completely started up.";
        logger.error(msg);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, msg);
        return;
    }

    // Getting various fields from arguments
    String startTimeStr = req.getParameter("from");
    String endTimeStr = req.getParameter("to");
    boolean useReduced = false;
    String useReducedStr = req.getParameter("usereduced");
    if (useReducedStr != null && !useReducedStr.equals("")) {
        try {
            useReduced = Boolean.parseBoolean(useReducedStr);
        } catch (Exception ex) {
            logger.error("Exception parsing usereduced", ex);
            useReduced = false;
        }
    }

    // Getting MIME type
    String extension = req.getPathInfo().split("\\.")[1];
    logger.info("Mime is " + extension);

    if (!extension.equals("json") && !extension.equals("raw") && !extension.equals("jplot")
            && !extension.equals("qw")) {
        String msg = "Mime type " + extension + " is not supported. Please use \"json\", \"jplot\" or \"raw\".";
        resp.setHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
        return;
    }

    boolean useChunkedEncoding = true;
    String doNotChunkStr = req.getParameter("donotchunk");
    if (doNotChunkStr != null && !doNotChunkStr.equals("false")) {
        logger.info("Turning off HTTP chunked encoding");
        useChunkedEncoding = false;
    }

    boolean fetchLatestMetadata = false;
    String fetchLatestMetadataStr = req.getParameter("fetchLatestMetadata");
    if (fetchLatestMetadataStr != null && fetchLatestMetadataStr.equals("true")) {
        logger.info("Adding a call to the engine to fetch the latest metadata");
        fetchLatestMetadata = true;
    }

    // For data retrieval we need a PV info. However, in case of PVs that have long since retired, we may not want to have PVTypeInfos in the system.
    // So, we support a template PV that lays out the data sources.
    // During retrieval, you can pass in the PV as a template and we'll clone this and make a temporary copy.
    String retiredPVTemplate = req.getParameter("retiredPVTemplate");

    // Go through the given PVs and return a bad-request error if any of them are null.
    int nullPVs = 0;
    for (String pvName : pvNames) {
        if (pvName == null) {
            nullPVs++;
        }
        if (nullPVs > 0) {
            logger.warn("Some PVs are null in the request.");
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST);
            return;
        }
    }

    if (pvNames.toString().matches("^.*" + ARCH_APPL_PING_PV + ".*$")) {
        logger.debug("Processing ping PV - this is used to validate the connection with the client.");
        processPingPV(req, resp);
        return;
    }

    for (int i = 0; i < pvNames.size(); i++) {
        String pvName = pvNames.get(i);
        if (pvName.endsWith(".VAL")) {
            // The shortened name must be written back into the list; reassigning the loop variable alone would be lost.
            pvNames.set(i, pvName.substring(0, pvName.length() - 4));
            logger.info("Removing .VAL from pvName for request giving " + pvNames.get(i));
        }
    }

    // ISO datetimes are of the form "2011-02-02T08:00:00.000Z"
    Timestamp end = TimeUtils.plusHours(TimeUtils.now(), 1);
    if (endTimeStr != null) {
        try {
            end = TimeUtils.convertFromISO8601String(endTimeStr);
        } catch (IllegalArgumentException ex) {
            try {
                end = TimeUtils.convertFromDateTimeStringWithOffset(endTimeStr);
            } catch (IllegalArgumentException ex2) {
                String msg = "Cannot parse time " + endTimeStr;
                logger.warn(msg, ex2);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
        }
    }

    // We get one day by default
    Timestamp start = TimeUtils.minusDays(end, 1);
    if (startTimeStr != null) {
        try {
            start = TimeUtils.convertFromISO8601String(startTimeStr);
        } catch (IllegalArgumentException ex) {
            try {
                start = TimeUtils.convertFromDateTimeStringWithOffset(startTimeStr);
            } catch (IllegalArgumentException ex2) {
                String msg = "Cannot parse time " + startTimeStr;
                logger.warn(msg, ex2);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
        }
    }

    if (end.before(start)) {
        String msg = "For request, end " + end.toString() + " is before start " + start.toString() + " for pvs "
                + StringUtils.join(pvNames, ", ");
        logger.error(msg);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
        return;
    }

    LinkedList<TimeSpan> requestTimes = new LinkedList<TimeSpan>();

    // We can specify a list of time stamp pairs using the optional timeranges parameter
    String timeRangesStr = req.getParameter("timeranges");
    if (timeRangesStr != null) {
        boolean continueWithRequest = parseTimeRanges(resp, "[" + StringUtils.join(pvNames, ", ") + "]",
                requestTimes, timeRangesStr);
        if (!continueWithRequest) {
            // Cannot parse the time ranges properly, so we abort the request.
            String msg = "The specified time ranges could not be processed appropriately. Aborting.";
            logger.info(msg);
            resp.setHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
            return;
        }

        // Override the start and the end so that the mergededup consumer works correctly.
        start = requestTimes.getFirst().getStartTime();
        end = requestTimes.getLast().getEndTime();

    } else {
        requestTimes.add(new TimeSpan(start, end));
    }

    assert (requestTimes.size() > 0);

    // Get a post processor for each PV specified in pvNames
    // If PV in the form <pp>(<pv>), process it
    String postProcessorUserArg = req.getParameter("pp");
    List<String> postProcessorUserArgs = new ArrayList<>(pvNames.size());
    List<PostProcessor> postProcessors = new ArrayList<>(pvNames.size());
    for (int i = 0; i < pvNames.size(); i++) {
        postProcessorUserArgs.add(postProcessorUserArg);

        if (pvNames.get(i).contains("(")) {
            if (!pvNames.get(i).contains(")")) {
                String msg = "Unbalanced paren " + pvNames.get(i);
                logger.error(msg);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
            String[] components = pvNames.get(i).split("[(,)]");
            postProcessorUserArg = components[0];
            postProcessorUserArgs.set(i, postProcessorUserArg);
            pvNames.set(i, components[1]);
            if (components.length > 2) {
                for (int j = 2; j < components.length; j++) {
                    postProcessorUserArgs.set(i, postProcessorUserArgs.get(i) + "_" + components[j]);
                }
            }
            logger.info("After parsing the function call syntax pvName is " + pvNames.get(i)
                    + " and postProcessorUserArg is " + postProcessorUserArg);
        }
        postProcessors.add(PostProcessors.findPostProcessor(postProcessorUserArg));
    }

    List<PVTypeInfo> typeInfos = new ArrayList<PVTypeInfo>(pvNames.size());
    for (int i = 0; i < pvNames.size(); i++) {
        typeInfos.add(PVNames.determineAppropriatePVTypeInfo(pvNames.get(i), configService));
    }
    pmansProfiler.mark("After PVTypeInfo");

    for (int i = 0; i < pvNames.size(); i++)
        if (typeInfos.get(i) == null && RetrievalState.includeExternalServers(req)) {
            logger.debug(
                    "Checking to see if pv " + pvNames.get(i) + " is served by a external Archiver Server");
            typeInfos.set(i,
                    checkIfPVisServedByExternalServer(pvNames.get(i), start, req, resp, useChunkedEncoding));
        }

    for (int i = 0; i < pvNames.size(); i++) {
        if (typeInfos.get(i) == null) {
            // TODO Only needed if we're forwarding the request to another server.
            if (resp.isCommitted()) {
                logger.debug("Proxied the data thru an external server for PV " + pvNames.get(i));
                return;
            }

            if (retiredPVTemplate != null) {
                PVTypeInfo templateTypeInfo = PVNames.determineAppropriatePVTypeInfo(retiredPVTemplate,
                        configService);
                if (templateTypeInfo != null) {
                    typeInfos.set(i, new PVTypeInfo(pvNames.get(i), templateTypeInfo));
                    typeInfos.get(i).setPaused(true);
                    typeInfos.get(i).setApplianceIdentity(configService.getMyApplianceInfo().getIdentity());
                    // Somehow tell the code downstream that this is a fake typeInfo.
                    typeInfos.get(i).setSamplingMethod(SamplingMethod.DONT_ARCHIVE);
                    logger.debug("Using a template PV for " + pvNames.get(i)
                            + " Need to determine the actual DBR type.");
                    setActualDBRTypeFromData(pvNames.get(i), typeInfos.get(i), configService);
                }
            }
        }

        if (typeInfos.get(i) == null) {
            String msg = "Unable to find typeinfo for pv " + pvNames.get(i);
            logger.error(msg);
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_NOT_FOUND, msg);
            return;
        }

        if (postProcessors.get(i) == null) {
            if (useReduced) {
                String defaultPPClassName = configService.getInstallationProperties().getProperty(
                        "org.epics.archiverappliance.retrieval.DefaultUseReducedPostProcessor",
                        FirstSamplePP.class.getName());
                logger.debug("Using the default usereduced preprocessor " + defaultPPClassName);
                try {
                    postProcessors.set(i, (PostProcessor) Class.forName(defaultPPClassName).newInstance());
                } catch (Exception ex) {
                    logger.error("Exception constructing new instance of post processor " + defaultPPClassName,
                            ex);
                    postProcessors.set(i, null);
                }
            }
        }

        if (postProcessors.get(i) == null) {
            logger.debug("Using the default raw preprocessor");
            postProcessors.set(i, new DefaultRawPostProcessor());
        }
    }

    // Get the appliances for each of the PVs
    List<ApplianceInfo> applianceForPVs = new ArrayList<ApplianceInfo>(pvNames.size());
    for (int i = 0; i < pvNames.size(); i++) {
        applianceForPVs.add(configService.getApplianceForPV(pvNames.get(i)));
        if (applianceForPVs.get(i) == null) {
            // TypeInfo cannot be null here...
            assert (typeInfos.get(i) != null);
            applianceForPVs.set(i, configService.getAppliance(typeInfos.get(i).getApplianceIdentity()));
        }
    }

    /*
     * If a PV is not assigned to the current appliance, record it against the external appliance
     * that serves it, along with the information needed to retrieve it from there.
     */
    Map<String, ArrayList<PVInfoForClusterRetrieval>> applianceToPVs = new HashMap<String, ArrayList<PVInfoForClusterRetrieval>>();
    for (int i = 0; i < pvNames.size(); i++) {
        if (!applianceForPVs.get(i).equals(configService.getMyApplianceInfo())) {

            ArrayList<PVInfoForClusterRetrieval> appliancePVs = applianceToPVs
                    .get(applianceForPVs.get(i).getRetrievalURL());
            appliancePVs = (appliancePVs == null) ? new ArrayList<>() : appliancePVs;
            PVInfoForClusterRetrieval pvInfoForRetrieval = new PVInfoForClusterRetrieval(pvNames.get(i),
                    typeInfos.get(i), postProcessors.get(i), applianceForPVs.get(i));
            appliancePVs.add(pvInfoForRetrieval);
            applianceToPVs.put(applianceForPVs.get(i).getRetrievalURL(), appliancePVs);
        }
    }

    List<List<Future<EventStream>>> listOfEventStreamFuturesLists = new ArrayList<List<Future<EventStream>>>();
    Set<String> retrievalURLs = applianceToPVs.keySet();
    if (retrievalURLs.size() > 0) {
        // Get the list of PVs registered for each external appliance and redirect the retrieval there.
        for (String retrievalURL : retrievalURLs) {
            // Get the list of PVs for this appliance
            ArrayList<PVInfoForClusterRetrieval> pvInfos = applianceToPVs.get(retrievalURL);
            try {
                List<List<Future<EventStream>>> resultFromForeignAppliances = retrieveEventStreamFromForeignAppliance(
                        req, resp, pvInfos, requestTimes, useChunkedEncoding,
                        retrievalURL + "/../data/getDataForPVs.raw", start, end);
                listOfEventStreamFuturesLists.addAll(resultFromForeignAppliances);
            } catch (Exception ex) {
                logger.error("Failed to retrieve " + StringUtils.join(pvNames, ", ") + " from " + retrievalURL
                        + ".");
                return;
            }
        }
    }

    pmansProfiler.mark("After Appliance Info");

    // Setting post processor for PVs, taking into account whether there is a field in the PV name
    List<String> pvNamesFromRequests = new ArrayList<String>(pvNames.size());
    for (int i = 0; i < pvNames.size(); i++) {
        String pvName = pvNames.get(i);
        pvNamesFromRequests.add(pvName);
        PVTypeInfo typeInfo = typeInfos.get(i);
        postProcessorUserArg = postProcessorUserArgs.get(i);

        // If a field is specified in a PV name, it will create a post processor for that
        String fieldName = PVNames.getFieldName(pvName);
        if (fieldName != null && !fieldName.equals("") && !pvName.equals(typeInfo.getPvName())) {
            logger.debug("We reset the pvName " + pvName + " to one from the typeinfo " + typeInfo.getPvName()
                    + " as that determines the name of the stream. " + "Also using ExtraFieldsPostProcessor.");
            pvNames.set(i, typeInfo.getPvName());
            postProcessors.set(i, new ExtraFieldsPostProcessor(fieldName));
        }

        try {
            // Postprocessors get their mandatory arguments from the request.
            // If user does not pass in the expected request, throw an exception.
            postProcessors.get(i).initialize(postProcessorUserArg, pvName);
        } catch (Exception ex) {
            String msg = "Postprocessor threw an exception during initialization for " + pvName;
            logger.error(msg, ex);
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_NOT_FOUND, msg);
            return;
        }
    }

    /*
     * MergeDedupConsumer is what writes PB data in its respective format to the HTTP response.
     * The response, after the MergeDedupConsumer is created, contains the following:
     * 
     * 1) The content type for the response.
     * 2) Any additional headers for the particular MIME response.
     * 
     * Additionally, the MergeDedupConsumer instance holds a reference to the output stream
     * that is used to write to the HTTP response. It is stored under the name `os`.
     */
    MergeDedupConsumer mergeDedupCountingConsumer;
    try {
        mergeDedupCountingConsumer = createMergeDedupConsumer(resp, extension, useChunkedEncoding);
    } catch (ServletException se) {
        String msg = "Exception when retrieving data " + "-->" + se.toString();
        logger.error(msg, se);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, msg);
        return;
    }

    /* 
     * BasicContext contains the PV name and the expected return type. Used to access PB files.
     * RetrievalExecutorResult contains a thread service class and the time spans. Presumably, the 
     * thread service is what retrieves the data, and the BasicContext is the context in which it 
     * works.
     */
    List<HashMap<String, String>> engineMetadatas = new ArrayList<HashMap<String, String>>();
    try {
        List<BasicContext> retrievalContexts = new ArrayList<BasicContext>(pvNames.size());
        List<RetrievalExecutorResult> executorResults = new ArrayList<RetrievalExecutorResult>(pvNames.size());
        for (int i = 0; i < pvNames.size(); i++) {
            if (fetchLatestMetadata) {
                // Make a call to the engine to fetch the latest metadata.
                engineMetadatas.add(fetchLatestMedataFromEngine(pvNames.get(i), applianceForPVs.get(i)));
            }
            retrievalContexts.add(new BasicContext(typeInfos.get(i).getDBRType(), pvNamesFromRequests.get(i)));
            executorResults.add(determineExecutorForPostProcessing(pvNames.get(i), typeInfos.get(i),
                    requestTimes, req, postProcessors.get(i)));
        }

        /*
         * There are as many Future objects in the eventStreamFutures List as there are periods over 
         * which to fetch data. Retrieval of data happens here in parallel.
         */
        List<LinkedList<Future<RetrievalResult>>> listOfRetrievalResultFuturesLists = new ArrayList<LinkedList<Future<RetrievalResult>>>();
        for (int i = 0; i < pvNames.size(); i++) {
            listOfRetrievalResultFuturesLists.add(resolveAllDataSources(pvNames.get(i), typeInfos.get(i),
                    postProcessors.get(i), applianceForPVs.get(i), retrievalContexts.get(i),
                    executorResults.get(i), req, resp));
        }
        pmansProfiler.mark("After data source resolution");

        for (int i = 0; i < pvNames.size(); i++) {
            // Data is retrieved here
            List<Future<EventStream>> eventStreamFutures = getEventStreamFuturesFromRetrievalResults(
                    executorResults.get(i), listOfRetrievalResultFuturesLists.get(i));
            listOfEventStreamFuturesLists.add(eventStreamFutures);
        }

    } catch (Exception ex) {
        if (ex.toString().contains("ClientAbortException")) {
            // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
            logger.debug("Exception when retrieving data ", ex);
        } else {
            logger.error("Exception when retrieving data " + "-->" + ex.toString(), ex);
        }
    }

    long s1 = System.currentTimeMillis();
    String currentlyProcessingPV = null;

    /*
     * The following try bracket goes through each of the streams in the list of event stream futures.
     * 
     * It is intended that the process goes through one PV at a time.
     */
    try {
        for (int i = 0; i < pvNames.size(); i++) {
            List<Future<EventStream>> eventStreamFutures = listOfEventStreamFuturesLists.get(i);
            String pvName = pvNames.get(i);
            PVTypeInfo typeInfo = typeInfos.get(i);
            HashMap<String, String> engineMetadata = fetchLatestMetadata ? engineMetadatas.get(i) : null;
            PostProcessor postProcessor = postProcessors.get(i);

            logger.debug("Done with the RetrievalResults; moving onto the individual event stream "
                    + "from each source for " + StringUtils.join(pvNames, ", "));
            pmansProfiler.mark("After retrieval results");
            for (Future<EventStream> future : eventStreamFutures) {
                EventStreamDesc sourceDesc = null;

                // Gets the result of a data retrieval
                try (EventStream eventStream = future.get()) {
                    sourceDesc = eventStream.getDescription();
                    if (sourceDesc == null) {
                        logger.warn("Skipping event stream without a desc for pv " + pvName);
                        continue;
                    }

                    logger.debug("Processing event stream for pv " + pvName + " from source "
                            + ((eventStream.getDescription() != null) ? eventStream.getDescription().getSource()
                                    : " unknown"));

                    try {
                        mergeTypeInfo(typeInfo, sourceDesc, engineMetadata);
                    } catch (MismatchedDBRTypeException mex) {
                        logger.error(mex.getMessage(), mex);
                        continue;
                    }

                    if (currentlyProcessingPV == null || !currentlyProcessingPV.equals(pvName)) {
                        logger.debug("Switching to new PV " + pvName + " In some mime responses we insert "
                                + "special headers at the beginning of the response. Calling the hook for "
                                + "that");
                        currentlyProcessingPV = pvName;
                        /*
                         * Goes through the PB data stream over a period of time. The relevant MIME response
                         * actually deals with the processing of the PV. `start` and `end` refer to the very
                         * beginning and very end of the time period being retrieved over, regardless of
                         * whether it is divided up or not.
                         */
                        mergeDedupCountingConsumer.processingPV(currentlyProcessingPV, start, end, sourceDesc);
                    }

                    try {
                        // If the postProcessor does not have a consolidated event stream, we send each eventstream across as we encounter it.
                        // Else we send the consolidatedEventStream down below.
                        if (!(postProcessor instanceof PostProcessorWithConsolidatedEventStream)) {
                            /*
                             * The eventStream object contains all the data over the current period.
                             */
                            mergeDedupCountingConsumer.consumeEventStream(eventStream);
                            resp.flushBuffer();
                        }
                    } catch (Exception ex) {
                        if (ex.toString().contains("ClientAbortException")) {
                            // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
                            logger.debug(
                                    "Exception when consuming and flushing data from " + sourceDesc.getSource(),
                                    ex);
                        } else {
                            logger.error("Exception when consuming and flushing data from "
                                    + sourceDesc.getSource() + "-->" + ex.toString(), ex);
                        }
                    }
                    pmansProfiler.mark("After event stream " + eventStream.getDescription().getSource());
                } catch (Exception ex) {
                    if (ex.toString().contains("ClientAbortException")) {
                        // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
                        logger.debug("Exception when consuming and flushing data from "
                                + (sourceDesc != null ? sourceDesc.getSource() : "N/A"), ex);
                    } else {
                        logger.error("Exception when consuming and flushing data from "
                                + (sourceDesc != null ? sourceDesc.getSource() : "N/A") + "-->" + ex.toString(),
                                ex);
                    }
                }
            }

            // TODO Go through data from other appliances here

            if (postProcessor instanceof PostProcessorWithConsolidatedEventStream) {
                try (EventStream eventStream = ((PostProcessorWithConsolidatedEventStream) postProcessor)
                        .getConsolidatedEventStream()) {
                    EventStreamDesc sourceDesc = eventStream.getDescription();
                    if (sourceDesc == null) {
                        logger.error("Skipping event stream without a desc for pv " + pvName
                                + " and post processor " + postProcessor.getExtension());
                    } else {
                        mergeDedupCountingConsumer.consumeEventStream(eventStream);
                        resp.flushBuffer();
                    }
                }
            }

            // If the postProcessor needs to send final data across, give it a chance now...
            if (postProcessor instanceof AfterAllStreams) {
                EventStream finalEventStream = ((AfterAllStreams) postProcessor).anyFinalData();
                if (finalEventStream != null) {
                    mergeDedupCountingConsumer.consumeEventStream(finalEventStream);
                    resp.flushBuffer();
                }
            }

            pmansProfiler.mark("After writing all eventstreams to response");
        }
    } catch (Exception ex) {
        if (ex.toString().contains("ClientAbortException")) {
            // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
            logger.debug("Exception when retrieving data ", ex);
        } else {
            logger.error("Exception when retrieving data " + "-->" + ex.toString(), ex);
        }
    }

    long s2 = System.currentTimeMillis();
    logger.info("For the complete request, found a total of " + mergeDedupCountingConsumer.totalEventsForAllPVs
            + " in " + (s2 - s1) + "(ms)" + " skipping " + mergeDedupCountingConsumer.skippedEventsForAllPVs
            + " events" + " deduping involved " + mergeDedupCountingConsumer.comparedEventsForAllPVs
            + " compares.");

    pmansProfiler.mark("After all closes and flushing all buffers");

    // Until we pin down all the conditions under which to log this, we log sparingly (slow retrievals only).
    if (pmansProfiler.totalTimeMS() > 5000) {
        logger.error("Retrieval time for " + StringUtils.join(pvNames, ", ") + " from " + startTimeStr + " to "
                + endTimeStr + ": " + pmansProfiler.toString());
    }

    mergeDedupCountingConsumer.close();
}
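
The method above streams archived events for each PV over a single requested time window. As a minimal, self-contained sketch of the kind of window check such retrieval code relies on (hypothetical names; this is not the archiver appliance's API), java.sql.Timestamp.before pairs naturally with a negated lower-bound test:

import java.sql.Timestamp;

public class TimeWindowCheck {

    /**
     * True if eventTime falls inside [start, end): not before the window
     * start, and strictly before the window end.
     */
    public static boolean isInWindow(Timestamp eventTime, Timestamp start, Timestamp end) {
        return !eventTime.before(start) && eventTime.before(end);
    }

    public static void main(String[] args) {
        Timestamp start = Timestamp.valueOf("2024-01-01 00:00:00");
        Timestamp end = Timestamp.valueOf("2024-01-02 00:00:00");
        Timestamp event = Timestamp.valueOf("2024-01-01 12:30:00");
        System.out.println(isInWindow(event, start, end)); // prints true
    }
}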

From source file:com.mothsoft.alexis.engine.predictive.OpenNLPMaxentModelExecutorTask.java

private boolean doExecute(final Model model) {
    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    boolean result = false;

    try {
        logger.info(String.format("Executing model %d", model.getId()));

        // load model file
        final File userDirectory = new File(baseDirectory, "" + model.getUserId());
        final File modelFile = new File(userDirectory, model.getId() + BIN_GZ_EXT);
        final AbstractModel maxentModel = new SuffixSensitiveGISModelReader(modelFile).getModel();

        final Date now = new Date();
        final TimeUnits timeUnits = model.getTimeUnits();
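        // Align 'now' to the start of its time period; the model is then scored over
        // the inclusive window [topOfPeriod, endOfPeriod].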
        final Timestamp topOfPeriod = new Timestamp(TimeUnits.floor(now, timeUnits).getTime());
        final Timestamp endOfPeriod = new Timestamp(topOfPeriod.getTime() + timeUnits.getDuration() - 1);

        // first position: sum of changes predicted, second position: number
        // of samples--will calculate a boring old mean...
        final double[][] changeByPeriod = new double[model.getLookahead()][2];

        // initialize (new double[][] elements are already zero in Java; the loop makes that explicit)
        for (int i = 0; i < changeByPeriod.length; i++) {
            changeByPeriod[i][0] = 0.0d;
            changeByPeriod[i][1] = 0.0d;
        }

        // find the most recent point value
        // FIXME - some sparse data sets may require executing the model on
        // all documents since that point or applying some sort of
        // dead-reckoning logic for smoothing
        final DataSetPoint initial = this.dataSetPointDao.findLastPointBefore(model.getTrainingDataSet(),
                endOfPeriod);

        // let's get the corner cases out of the way
        if (initial == null) {
            logger.warn("Insufficient data to execute model!");
            return false;
        }

        // happy path
        // build consolidated context of events in this period
        // find current value of training data set for this period
        final double[] probs = eval(model, topOfPeriod, endOfPeriod, maxentModel);

        // predict from the last available point, adjusted for time
        // remaining in period
        final double y0 = initial.getY();

        // map outcomes to periods in the future (at least no earlier than
        // this period)
        for (int i = 0; i < probs.length; i++) {
            // in the form +nU:+/-x, where n is the number of periods, U is
            // the unit type for the period, +/- is the direction, and x is
            // a discrete value from Model.OUTCOME_ARRAY
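            // (illustrative only: an outcome like "+2D:+0.05" would parse as period 2,
            // units "D", and a +5% change)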
            final String outcome = maxentModel.getOutcome(i);

            final Matcher matcher = OUTCOME_PATTERN.matcher(outcome);

            if (!matcher.matches()) {
                logger.warn("Can't process outcome: " + outcome + "; skipping");
                continue;
            }

            final int period = Integer.valueOf(matcher.group(1));
            final String units = matcher.group(2);
            final double percentChange = Double.valueOf(matcher.group(3));

            // record the observation and the count of observations
            changeByPeriod[period][0] += percentChange;
            changeByPeriod[period][1] += 1.0d;

            if (logger.isDebugEnabled()) {
                final double yi = y0 * (1 + percentChange);
                logger.debug(String.format("Outcome: %s, %s: +%d, change: %f, new value: %f, probability: %f",
                        outcome, units, period, percentChange, yi, probs[i]));
            }
        }

        // build points for predictive data set
        double yn = y0;

        // we need to track the points and remove any that were not
        // predicted by this execution of the model
        final Timestamp endOfPredictionRange = new Timestamp(
                topOfPeriod.getTime() + (changeByPeriod.length * timeUnits.getDuration()));
        final List<DataSetPoint> existingPoints = this.dataSetPointDao
                .findByTimeRange(model.getPredictionDataSet(), topOfPeriod, endOfPredictionRange);

        for (int period = 0; period < changeByPeriod.length; period++) {
            final double totalPercentChange = changeByPeriod[period][0];
            final double sampleCount = changeByPeriod[period][1];
            double percentChange;

            if (totalPercentChange == 0.0d || sampleCount == 0.0d) {
                percentChange = 0.0d;
            } else {
                percentChange = totalPercentChange / sampleCount;
            }

            // apply adjustments only if the initial point is within the
            // time period, and only for the first time period
            boolean applyAdjustment = period == 0 && topOfPeriod.before(initial.getX());

            if (applyAdjustment) {
                final double adjustmentFactor = findAdjustmentFactor(initial.getX(), timeUnits);
                // Scale the already-guarded percentChange; recomputing totalPercentChange / sampleCount
                // here would yield NaN whenever sampleCount is zero.
                percentChange = percentChange * adjustmentFactor;
            }

            // figure out the next value and coerce to a sane number of
            // decimal places (2).
            final double newValue = (double) Math.round(yn * (1.0d + percentChange) * 100) / 100;

            final Timestamp timestamp = new Timestamp(
                    topOfPeriod.getTime() + (period * timeUnits.getDuration()));

            if (logger.isDebugEnabled()) {
                logger.debug(String.format("Model %d for data set %d predicted point: (%s, %f)", model.getId(),
                        model.getTrainingDataSet().getId(), DateFormat.getInstance().format(timestamp),
                        newValue));
            }

            DataSetPoint ithPoint = this.dataSetPointDao.findByTimestamp(model.getPredictionDataSet(),
                    timestamp);

            // conditionally create
            if (ithPoint == null) {
                ithPoint = new DataSetPoint(model.getPredictionDataSet(), timestamp, newValue);
                this.dataSetPointDao.add(ithPoint);
            } else {
                // or update
                ithPoint.setY(newValue);

                // updated points retained, other existing removed
                existingPoints.remove(ithPoint);
            }

            // store current and use as starting point for next iteration
            yn = newValue;
        }

        // remove stale points from an old model execution
        for (final DataSetPoint toRemove : existingPoints) {
            this.dataSetPointDao.remove(toRemove);
        }

        result = true;

    } catch (final Exception e) {
        logger.warn("Model " + model.getId() + " failed with: " + e, e);
        result = false;
    } finally {
        stopWatch.stop();
        logger.info(String.format("Executing model %d took %s", model.getId(), stopWatch.toString()));
    }

    return result;
}
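
The applyAdjustment guard above is this snippet's use of java.sql.Timestamp.before: a partial-period adjustment is applied only when the period starts before the most recent observed point. A condensed sketch of that comparison, using a hypothetical hour-based flooring helper in place of the Alexis TimeUnits API:

import java.sql.Timestamp;

public class PeriodAdjustmentCheck {

    private static final long HOUR_MS = 60L * 60L * 1000L;

    // Hypothetical stand-in for TimeUnits.floor(): align a timestamp to the top of its hour.
    static Timestamp floorToHour(Timestamp ts) {
        return new Timestamp((ts.getTime() / HOUR_MS) * HOUR_MS);
    }

    public static void main(String[] args) {
        Timestamp topOfPeriod = floorToHour(new Timestamp(System.currentTimeMillis()));
        Timestamp lastPoint = new Timestamp(System.currentTimeMillis());

        // Mirrors the guard in doExecute(): adjust only when the period starts
        // before the most recent observed point.
        boolean applyAdjustment = topOfPeriod.before(lastPoint);
        System.out.println("apply adjustment: " + applyAdjustment);
    }
}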