Example usage for java.sql Timestamp toString

List of usage examples for java.sql Timestamp toString

Introduction

On this page you can find example usage for java.sql Timestamp toString.

Prototype

@SuppressWarnings("deprecation")
public String toString() 

Source Link

Document

Formats a timestamp in JDBC timestamp escape format.
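
A minimal, self-contained sketch of that format (the class name here is ours):

import java.sql.Timestamp;

public class TimestampToStringDemo {
    public static void main(String[] args) {
        // Timestamp.valueOf parses the same JDBC timestamp escape format.
        Timestamp ts = Timestamp.valueOf("2011-11-02 16:26:14.123456789");

        // toString() renders yyyy-mm-dd hh:mm:ss.fffffffff, trimming trailing
        // zeros from the fractional seconds.
        System.out.println(ts.toString()); // 2011-11-02 16:26:14.123456789
    }
}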

Usage

From source file:org.kawanfw.test.api.client.InsertAndUpdatePrepStatementTest.java

/**
 * Test that the values were correctly inserted.
 *
 * @param connection
 */
@SuppressWarnings("deprecation")
public void selectPrepStatementTest(Connection connection, int customerId, int orderId) throws Exception {
    int customer_id;
    int item_id;
    String description;
    BigDecimal cost_price;
    BigDecimal cost_price_scale;
    Date date_placed;
    Timestamp date_shipped;
    byte[] jpeg_image = null;
    boolean is_delivered;
    int quantity;

    String sql = "select * from orderlog where customer_id = ? and item_id = ?";

    PreparedStatement prepStatement = connection.prepareStatement(sql);

    int i = 1;
    prepStatement.setInt(i++, customerId);
    prepStatement.setInt(i++, orderId);

    ResultSet rs = prepStatement.executeQuery();

    MessageDisplayer.display("");

    SqlUtil sqlUtil = new SqlUtil(connection);

    while (rs.next()) {

        customer_id = rs.getInt("customer_id");
        item_id = rs.getInt("item_id");
        description = rs.getString("description");
        cost_price = rs.getBigDecimal("cost_price");
        cost_price_scale = rs.getBigDecimal("cost_price", 5);
        date_placed = rs.getDate("date_placed");
        date_shipped = rs.getTimestamp("date_shipped");
        jpeg_image = rs.getBytes("jpeg_image");

        if (sqlUtil.isIngres()) {
            is_delivered = rs.getInt("is_delivered") == 1;
        } else {
            is_delivered = rs.getBoolean("is_delivered");
        }

        quantity = rs.getInt("quantity");

        MessageDisplayer.display("customer_id     : " + customer_id);
        MessageDisplayer.display("item_id         : " + item_id);
        MessageDisplayer.display("description     : " + description);
        MessageDisplayer.display("cost_price      : " + cost_price);
        MessageDisplayer.display("cost_price_scale: " + cost_price_scale);
        MessageDisplayer.display("date_placed     : " + date_placed);
        MessageDisplayer.display("date_shipped    : " + date_shipped);
        MessageDisplayer.display("jpeg_image      : " + jpeg_image);
        MessageDisplayer.display("is_delivered    : " + is_delivered);
        MessageDisplayer.display("quantity        : " + quantity);

        // Assert on the first 19 chars (ex: 2011-11-02 16:26:14), because
        // MySQL truncates the remaining milliseconds
        Assert.assertEquals(dateShippedUpdated.toString().substring(0, 19),
                date_shipped.toString().substring(0, 19));

        if (new SqlUtil(connection).isSQLAnywhere()) {
            // Because SQL Anywhere stores 5000.0000 instead of 5000 in the db
            Assert.assertEquals(new BigDecimal(customer_id * increaseFactor).toString(),
                    cost_price.toString().substring(0, 4));
        } else {
            Assert.assertEquals(new BigDecimal(customer_id * increaseFactor).toString(), cost_price.toString());
        }

        Assert.assertTrue(is_delivered);

        Assert.assertEquals(customer_id * increaseFactor * 2, quantity);

        i = 1;
        customer_id = rs.getInt(i++);
        item_id = rs.getInt(i++);
        description = rs.getString(i++);
        int iForCostPrice = i;
        cost_price = rs.getBigDecimal(i++);
        cost_price_scale = rs.getBigDecimal(iForCostPrice, 5);
        date_placed = rs.getDate(i++);
        date_shipped = rs.getTimestamp(i++);

        // Do not read the same column twice: the underlying file was deleted
        // at the first read.
        // jpeg_image = rs.getBytes(i++);
        i++;

        is_delivered = rs.getBoolean(i++);
        quantity = rs.getInt(i++);

        MessageDisplayer.display("");
        MessageDisplayer.display("customer_id     : " + customer_id);
        MessageDisplayer.display("item_id         : " + item_id);
        MessageDisplayer.display("description     : " + description);
        MessageDisplayer.display("cost_price      : " + cost_price);
        MessageDisplayer.display("cost_price_scale: " + cost_price_scale);
        MessageDisplayer.display("date_placed     : " + date_placed);
        MessageDisplayer.display("date_shipped    : " + date_shipped);
        MessageDisplayer.display("jpeg_image      : " + jpeg_image);
        MessageDisplayer.display("is_delivered    : " + is_delivered);
        MessageDisplayer.display("quantity        : " + quantity);

        // Assert on the first 18 chars only, because MySQL truncates
        // the remaining milliseconds
        Assert.assertEquals(date_shipped.toString().substring(0, 18),
                dateShippedUpdated.toString().substring(0, 18));

        if (new SqlUtil(connection).isSQLAnywhere()) {
            // Because SQL Anywhere stores 5000.0000 instead of 5000 in the db
            Assert.assertEquals(new BigDecimal(customer_id * increaseFactor).toString(),
                    cost_price.toString().substring(0, 4));
        } else {
            Assert.assertEquals(new BigDecimal(customer_id * increaseFactor).toString(), cost_price.toString());
        }

        Assert.assertTrue(is_delivered);
        Assert.assertEquals(customer_id * increaseFactor * 2, quantity);
    }

    // Close the ResultSet before its Statement.
    rs.close();
    prepStatement.close();

    MessageDisplayer.display("Select done!");

}
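
The assertions above compare only the leading characters of the toString() output because some databases truncate fractional seconds. A standalone sketch of that comparison technique (the values are illustrative):

import java.sql.Timestamp;

public class SecondPrecisionCompare {
    public static void main(String[] args) {
        Timestamp written = Timestamp.valueOf("2011-11-02 16:26:14.123");
        Timestamp readBack = Timestamp.valueOf("2011-11-02 16:26:14.0"); // millis lost in the db

        // "yyyy-mm-dd hh:mm:ss" is exactly 19 characters, so substring(0, 19)
        // drops the fractional-second part before comparing.
        String a = written.toString().substring(0, 19);
        String b = readBack.toString().substring(0, 19);
        System.out.println(a.equals(b)); // true
    }
}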

From source file:dao.DatasetsDAO.java

public static ObjectNode getPagedDatasets(String urn, Integer page, Integer size, String user) {
    ObjectNode result = Json.newObject();

    Integer userId = UserDAO.getUserIDByUserName(user);

    javax.sql.DataSource ds = getJdbcTemplate().getDataSource();
    DataSourceTransactionManager tm = new DataSourceTransactionManager(ds);
    TransactionTemplate txTemplate = new TransactionTemplate(tm);
    final Integer id = userId;

    result = txTemplate.execute(new TransactionCallback<ObjectNode>() {
        public ObjectNode doInTransaction(TransactionStatus status) {

            ObjectNode resultNode = Json.newObject();
            List<Dataset> pagedDatasets = new ArrayList<Dataset>();
            List<Map<String, Object>> rows = null;
            if (id != null && id > 0) {
                if (StringUtils.isBlank(urn)) {
                    rows = getJdbcTemplate().queryForList(SELECT_PAGED_DATASET_BY_CURRENT_USER,
                            (page - 1) * size, size, id, id);
                } else {
                    rows = getJdbcTemplate().queryForList(SELECT_PAGED_DATASET_BY_URN_CURRENT_USER, urn + "%",
                            (page - 1) * size, size, id, id);
                }
            } else {
                if (StringUtils.isBlank(urn)) {
                    rows = getJdbcTemplate().queryForList(SELECT_PAGED_DATASET, (page - 1) * size, size);
                } else {
                    rows = getJdbcTemplate().queryForList(SELECT_PAGED_DATASET_BY_URN, urn + "%",
                            (page - 1) * size, size);
                }

            }

            long count = 0;
            try {

                if (StringUtils.isBlank(urn)) {
                    count = getJdbcTemplate().queryForObject(GET_PAGED_DATASET_COUNT, Long.class);
                } else {
                    count = getJdbcTemplate().queryForObject(GET_PAGED_DATASET_COUNT_BY_URN, Long.class,
                            urn + "%");
                }
            } catch (EmptyResultDataAccessException e) {
                Logger.error("Exception = " + e.getMessage());
            }

            for (Map row : rows) {

                Dataset ds = new Dataset();
                Timestamp modified = (Timestamp) row.get(DatasetWithUserRowMapper.DATASET_MODIFIED_TIME_COLUMN);
                ds.id = (Long) row.get(DatasetWithUserRowMapper.DATASET_ID_COLUMN);
                ds.name = (String) row.get(DatasetWithUserRowMapper.DATASET_NAME_COLUMN);
                ds.source = (String) row.get(DatasetWithUserRowMapper.DATASET_SOURCE_COLUMN);
                ds.urn = (String) row.get(DatasetWithUserRowMapper.DATASET_URN_COLUMN);
                ds.schema = (String) row.get(DatasetWithUserRowMapper.DATASET_SCHEMA_COLUMN);
                String strOwner = (String) row.get(DatasetWithUserRowMapper.DATASET_OWNER_ID_COLUMN);
                String strOwnerName = (String) row.get(DatasetWithUserRowMapper.DATASET_OWNER_NAME_COLUMN);
                Long sourceModifiedTime = (Long) row
                        .get(DatasetWithUserRowMapper.DATASET_SOURCE_MODIFIED_TIME_COLUMN);
                String properties = (String) row.get(DatasetWithUserRowMapper.DATASET_PROPERTIES_COLUMN);
                try {
                    if (StringUtils.isNotBlank(properties)) {
                        ds.properties = Json.parse(properties);
                    }
                } catch (Exception e) {
                    Logger.error(e.getMessage());
                }

                if (modified != null && sourceModifiedTime != null && sourceModifiedTime > 0) {
                    ds.modified = modified;
                    ds.formatedModified = modified.toString();
                }

                String[] owners = null;
                if (StringUtils.isNotBlank(strOwner)) {
                    owners = strOwner.split(",");
                }
                String[] ownerNames = null;
                if (StringUtils.isNotBlank(strOwnerName)) {
                    ownerNames = strOwnerName.split(",");
                }
                ds.owners = new ArrayList<User>();
                if (owners != null && ownerNames != null) {
                    if (owners.length == ownerNames.length) {
                        for (int i = 0; i < owners.length; i++) {
                            User datasetOwner = new User();
                            datasetOwner.userName = owners[i];
                            if (datasetOwner.userName.equalsIgnoreCase(user)) {
                                ds.isOwned = true;
                            }
                            if (StringUtils.isBlank(ownerNames[i]) || ownerNames[i].equalsIgnoreCase("*")) {
                                datasetOwner.name = owners[i];
                            } else {
                                datasetOwner.name = ownerNames[i];
                            }
                            ds.owners.add(datasetOwner);
                        }
                    } else {
                        Logger.error("getPagedDatasets get wrong owner and names. Dataset ID: "
                                + Long.toString(ds.id) + " Owner: " + owners + " Owner names: " + ownerNames);
                    }
                }

                Integer favoriteId = (Integer) row.get(DatasetWithUserRowMapper.FAVORITE_DATASET_ID_COLUMN);
                Long watchId = (Long) row.get(DatasetWithUserRowMapper.DATASET_WATCH_ID_COLUMN);

                Long schemaHistoryRecordCount = 0L;
                try {
                    schemaHistoryRecordCount = getJdbcTemplate().queryForObject(CHECK_SCHEMA_HISTORY,
                            Long.class, ds.id);
                } catch (EmptyResultDataAccessException e) {
                    Logger.error("Exception = " + e.getMessage());
                }

                if (StringUtils.isNotBlank(ds.urn)) {
                    if (ds.urn.substring(0, 4).equalsIgnoreCase(DatasetRowMapper.HDFS_PREFIX)) {
                        ds.hdfsBrowserLink = Play.application().configuration().getString(HDFS_BROWSER_URL_KEY)
                                + ds.urn.substring(DatasetRowMapper.HDFS_URN_PREFIX_LEN);
                    }
                }
                if (favoriteId != null && favoriteId > 0) {
                    ds.isFavorite = true;
                } else {
                    ds.isFavorite = false;
                }
                if (watchId != null && watchId > 0) {
                    ds.watchId = watchId;
                    ds.isWatched = true;
                } else {
                    ds.isWatched = false;
                    ds.watchId = 0L;
                }
                if (schemaHistoryRecordCount != null && schemaHistoryRecordCount > 0) {
                    ds.hasSchemaHistory = true;
                } else {
                    ds.hasSchemaHistory = false;
                }
                pagedDatasets.add(ds);
            }

            resultNode.put("count", count);
            resultNode.put("page", page);
            resultNode.put("itemsPerPage", size);
            resultNode.put("totalPages", (int) Math.ceil(count / ((double) size)));
            resultNode.set("datasets", Json.toJson(pagedDatasets));
            return resultNode;
        }
    });
    return result;
}
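
The DAO above keeps both the raw Timestamp and its toString() form (formatedModified) when mapping a row. A minimal sketch of pulling a Timestamp out of a queryForList-style row map and formatting it (the column name is ours):

import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Map;

public class RowMapTimestamp {
    public static void main(String[] args) {
        // A row as JdbcTemplate.queryForList would return it.
        Map<String, Object> row = new HashMap<>();
        row.put("modified_time", Timestamp.valueOf("2015-03-01 12:00:00"));

        Timestamp modified = (Timestamp) row.get("modified_time");
        String formatted = (modified != null) ? modified.toString() : null;
        System.out.println(formatted); // 2015-03-01 12:00:00.0
    }
}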

From source file:org.ramadda.repository.database.DatabaseManager.java

/**
 * _more_
 *
 * @param os _more_
 * @param all _more_
 *
 * @throws Exception _more_
 */
public void makeDatabaseCopyxxx(OutputStream os, boolean all) throws Exception {

    Connection connection = getConnection();
    try {
        DatabaseMetaData dbmd = connection.getMetaData();
        ResultSet catalogs = dbmd.getCatalogs();
        ResultSet tables = dbmd.getTables(null, null, null, new String[] { "TABLE" });

        ResultSetMetaData rsmd = tables.getMetaData();
        for (int col = 1; col <= rsmd.getColumnCount(); col++) {
            System.err.println(rsmd.getColumnName(col));
        }
        int totalRowCnt = 0;
        while (tables.next()) {
            //                String tableName = tables.getString("Tables.NAME.NAME");
            //                String tableType = tables.getString("Tables.TYPE.NAME");
            String tableName = tables.getString("TABLE_NAME");
            String tableType = tables.getString("TABLE_TYPE");
            if ((tableType == null) || Misc.equals(tableType, "INDEX") || tableType.startsWith("SYSTEM")) {
                continue;
            }

            String tn = tableName.toLowerCase();
            if (!all) {
                if (tn.equals(Tables.GLOBALS.NAME) || tn.equals(Tables.USERS.NAME)
                        || tn.equals(Tables.PERMISSIONS.NAME) || tn.equals(Tables.HARVESTERS.NAME)
                        || tn.equals(Tables.USERROLES.NAME)) {
                    continue;
                }
            }

            ResultSet cols = dbmd.getColumns(null, null, tableName, null);

            int colCnt = 0;

            String colNames = null;
            List types = new ArrayList();
            while (cols.next()) {
                String colName = cols.getString("COLUMN_NAME");
                if (colNames == null) {
                    colNames = " (";
                } else {
                    colNames += ",";
                }
                colNames += colName;
                int type = cols.getInt("DATA_TYPE");
                types.add(type);
                colCnt++;
            }
            colNames += ") ";

            Statement statement = execute("select * from " + tableName, 10000000, 0);
            SqlUtil.Iterator iter = getIterator(statement);
            ResultSet results;
            int rowCnt = 0;
            List valueList = new ArrayList();
            boolean didDelete = false;
            while ((results = iter.getNext()) != null) {
                if (!didDelete) {
                    didDelete = true;
                    IOUtil.write(os, "delete from  " + tableName.toLowerCase() + ";\n");
                }
                totalRowCnt++;
                rowCnt++;
                StringBuffer value = new StringBuffer("(");
                for (int i = 1; i <= colCnt; i++) {
                    int type = ((Integer) types.get(i - 1)).intValue();
                    if (i > 1) {
                        value.append(",");
                    }
                    if (type == java.sql.Types.TIMESTAMP) {
                        Timestamp ts = results.getTimestamp(i);
                        //                            sb.append(SqlUtil.format(new Date(ts.getTime())));
                        if (ts == null) {
                            value.append("null");
                        } else {
                            value.append(HtmlUtils.squote(ts.toString()));
                        }

                    } else if (type == java.sql.Types.VARCHAR) {
                        String s = results.getString(i);
                        if (s != null) {
                            //If the target isn't mysql:
                            //s = s.replace("'", "''");
                            //If the target is mysql:
                            s = s.replace("'", "\\'");
                            s = s.replace("\r", "\\r");
                            s = s.replace("\n", "\\n");
                            value.append("'" + s + "'");
                        } else {
                            value.append("null");
                        }
                    } else {
                        String s = results.getString(i);
                        value.append(s);
                    }
                }
                value.append(")");
                valueList.add(value.toString());
                if (valueList.size() > 50) {
                    IOUtil.write(os, "insert into " + tableName.toLowerCase() + colNames + " values ");
                    IOUtil.write(os, StringUtil.join(",", valueList));
                    IOUtil.write(os, ";\n");
                    valueList = new ArrayList();
                }
            }
            if (valueList.size() > 0) {
                if (!didDelete) {
                    didDelete = true;
                    IOUtil.write(os, "delete from  " + tableName.toLowerCase() + ";\n");
                }
                IOUtil.write(os, "insert into " + tableName.toLowerCase() + colNames + " values ");
                IOUtil.write(os, StringUtil.join(",", valueList));
                IOUtil.write(os, ";\n");
            }
        }
    } finally {
        closeConnection(connection);
    }

}
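
The copy routine above writes each TIMESTAMP column as a quoted toString() literal inside the generated INSERT statements. A hedged sketch of that quoting step (the helper name is ours; production code should prefer PreparedStatement parameters over string literals):

import java.sql.Timestamp;

public class TimestampSqlLiteral {
    // Render a timestamp as a quoted SQL literal, or the keyword null.
    static String toSqlLiteral(Timestamp ts) {
        return (ts == null) ? "null" : "'" + ts.toString() + "'";
    }

    public static void main(String[] args) {
        System.out.println(toSqlLiteral(Timestamp.valueOf("2011-11-02 16:26:14")));
        // -> '2011-11-02 16:26:14.0'
        System.out.println(toSqlLiteral(null)); // -> null
    }
}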

From source file:csiro.pidsvc.mappingstore.Manager.java

protected String exportMappingsImpl(Object mappingIdentifier, String scope, boolean fullBackup, String source,
        boolean preserveDatesForDeprecatedMappings, boolean includeConditionSets, boolean includeLookupMaps)
        throws SQLException {
    PreparedStatement pst = null;
    ResultSet rs = null;
    String ret = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<backup type=\""
            + (fullBackup ? "full" : "partial") + "\" scope=\"" + scope
            + "\" xmlns=\"urn:csiro:xmlns:pidsvc:backup:1.0\">";
    int defaultActionId;
    Timestamp timeStamp;
    String buf, path;

    try {
        if (mappingIdentifier instanceof Integer) {
            if ((Integer) mappingIdentifier == 0) {
                // Catch all mapping.
                pst = _connection.prepareStatement(
                        "SELECT * FROM " + source + " WHERE mapping_path IS NULL ORDER BY mapping_id");
            } else {
                pst = _connection.prepareStatement(
                        "SELECT * FROM " + source + " WHERE mapping_id = ? ORDER BY mapping_id");
                pst.setInt(1, (Integer) mappingIdentifier);
            }
        } else {
            // Export mapping by path or all mappings.
            pst = _connection.prepareStatement("SELECT * FROM " + source
                    + (mappingIdentifier == null ? "" : " WHERE mapping_path = ?") + " ORDER BY mapping_id");
            if (mappingIdentifier != null)
                pst.setString(1, (String) mappingIdentifier);
        }

        if (pst.execute()) {
            for (rs = pst.getResultSet(); rs.next();) {
                path = rs.getString(
                        mappingIdentifier instanceof Integer || !fullBackup ? "original_path" : "mapping_path");

                ret += "<mapping";

                // Time stamps are only applicable for full backups and deprecated records.
                if (fullBackup || !fullBackup && preserveDatesForDeprecatedMappings
                        && rs.getTimestamp("date_end") != null) {
                    timeStamp = rs.getTimestamp("date_start");
                    if (timeStamp != null)
                        ret += " date_start=\"" + timeStamp.toString().replace(" ", "T") + "Z\"";
                    timeStamp = rs.getTimestamp("date_end");
                    if (timeStamp != null)
                        ret += " date_end=\"" + timeStamp.toString().replace(" ", "T") + "Z\"";
                }

                // Preserve original mapping path for full backups.
                if (fullBackup && path != null)
                    ret += " original_path=\"" + rs.getString("original_path") + "\"";

                ret += ">"; // mapping

                ret += (path == null ? "<path/>" : "<path>" + StringEscapeUtils.escapeXml(path) + "</path>");
                buf = rs.getString("parent");
                if (buf != null)
                    ret += "<parent>" + StringEscapeUtils.escapeXml(buf) + "</parent>";
                ret += "<type>" + rs.getString("type") + "</type>";
                buf = rs.getString("title");
                if (buf != null)
                    ret += "<title>" + StringEscapeUtils.escapeXml(buf) + "</title>";
                buf = rs.getString("description");
                if (buf != null)
                    ret += "<description>" + StringEscapeUtils.escapeXml(buf) + "</description>";
                buf = rs.getString("creator");
                if (buf != null)
                    ret += "<creator>" + StringEscapeUtils.escapeXml(buf) + "</creator>";
                buf = rs.getString("commit_note");
                if (buf != null)
                    ret += "<commitNote>" + StringEscapeUtils.escapeXml(buf) + "</commitNote>";

                // Default action.
                defaultActionId = rs.getInt("default_action_id");
                if (!rs.wasNull()) {
                    csiro.pidsvc.mappingstore.action.Descriptor action = getAction(defaultActionId);
                    ret += "<action>";
                    ret += "<type>" + action.Type + "</type>";
                    if (action.Name != null)
                        ret += "<name>" + StringEscapeUtils.escapeXml(action.Name) + "</name>";
                    if (action.Value != null)
                        ret += "<value>" + StringEscapeUtils.escapeXml(action.Value) + "</value>";
                    buf = rs.getString("default_action_description");
                    if (buf != null)
                        ret += "<description>" + StringEscapeUtils.escapeXml(buf) + "</description>";
                    ret += "</action>";
                }

                // Conditions.
                ret += exportConditionsByMappingId(rs.getInt("mapping_id"));

                ret += "</mapping>";
            }
        }

        // Condition sets.
        if (includeConditionSets)
            ret += exportConditionSetImpl(null);

        // Lookup maps.
        if (includeLookupMaps)
            ret += exportLookupImpl(null);
    } catch (Exception e) {
        _logger.error(e);
    } finally {
        if (rs != null)
            rs.close();
        if (pst != null)
            pst.close();
    }
    ret += "</backup>";
    return ret;
}
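
exportMappingsImpl turns the JDBC escape format into an ISO-8601-style string by swapping the space for "T" and appending "Z". A minimal sketch of that transformation (the trailing "Z" is only accurate if the stored timestamps are UTC):

import java.sql.Timestamp;

public class JdbcToIso {
    public static void main(String[] args) {
        Timestamp ts = Timestamp.valueOf("2013-05-07 09:30:00.25");

        // "2013-05-07 09:30:00.25" -> "2013-05-07T09:30:00.25Z"
        String iso = ts.toString().replace(" ", "T") + "Z";
        System.out.println(iso);
    }
}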

From source file:org.epics.archiverappliance.retrieval.DataRetrievalServlet.java

private void doGetSinglePV(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {

    PoorMansProfiler pmansProfiler = new PoorMansProfiler();
    String pvName = req.getParameter("pv");

    if (configService.getStartupState() != STARTUP_SEQUENCE.STARTUP_COMPLETE) {
        String msg = "Cannot process data retrieval requests for PV " + pvName
                + " until the appliance has completely started up.";
        logger.error(msg);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, msg);
        return;
    }

    String startTimeStr = req.getParameter("from");
    String endTimeStr = req.getParameter("to");
    boolean useReduced = false;
    String useReducedStr = req.getParameter("usereduced");
    if (useReducedStr != null && !useReducedStr.equals("")) {
        try {
            useReduced = Boolean.parseBoolean(useReducedStr);
        } catch (Exception ex) {
            logger.error("Exception parsing usereduced", ex);
            useReduced = false;
        }
    }
    String extension = req.getPathInfo().split("\\.")[1];
    logger.info("Mime is " + extension);

    boolean useChunkedEncoding = true;
    String doNotChunkStr = req.getParameter("donotchunk");
    if (doNotChunkStr != null && !doNotChunkStr.equals("false")) {
        logger.info("Turning off HTTP chunked encoding");
        useChunkedEncoding = false;
    }

    boolean fetchLatestMetadata = false;
    String fetchLatestMetadataStr = req.getParameter("fetchLatestMetadata");
    if (fetchLatestMetadataStr != null && fetchLatestMetadataStr.equals("true")) {
        logger.info("Adding a call to the engine to fetch the latest metadata");
        fetchLatestMetadata = true;
    }

    // For data retrieval we need PV info. However, for PVs that have long since retired, we may not want to keep PVTypeInfos in the system.
    // So, we support a template PV that lays out the data sources.
    // During retrieval, you can pass in the PV as a template and we'll clone this and make a temporary copy.
    String retiredPVTemplate = req.getParameter("retiredPVTemplate");

    if (pvName == null) {
        String msg = "PV name is null.";
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
        return;
    }

    if (pvName.equals(ARCH_APPL_PING_PV)) {
        logger.debug("Processing ping PV - this is used to validate the connection with the client.");
        processPingPV(req, resp);
        return;
    }

    if (pvName.endsWith(".VAL")) {
        int len = pvName.length();
        pvName = pvName.substring(0, len - 4);
        logger.info("Removing .VAL from pvName for request giving " + pvName);
    }

    // ISO datetimes are of the form "2011-02-02T08:00:00.000Z"
    Timestamp end = TimeUtils.plusHours(TimeUtils.now(), 1);
    if (endTimeStr != null) {
        try {
            end = TimeUtils.convertFromISO8601String(endTimeStr);
        } catch (IllegalArgumentException ex) {
            try {
                end = TimeUtils.convertFromDateTimeStringWithOffset(endTimeStr);
            } catch (IllegalArgumentException ex2) {
                String msg = "Cannot parse time" + endTimeStr;
                logger.warn(msg, ex2);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
        }
    }

    // We get one day by default
    Timestamp start = TimeUtils.minusDays(end, 1);
    if (startTimeStr != null) {
        try {
            start = TimeUtils.convertFromISO8601String(startTimeStr);
        } catch (IllegalArgumentException ex) {
            try {
                start = TimeUtils.convertFromDateTimeStringWithOffset(startTimeStr);
            } catch (IllegalArgumentException ex2) {
                String msg = "Cannot parse time " + startTimeStr;
                logger.warn(msg, ex2);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
        }
    }

    if (end.before(start)) {
        String msg = "For request, end " + end.toString() + " is before start " + start.toString() + " for pv "
                + pvName;
        logger.error(msg);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
        return;
    }

    LinkedList<TimeSpan> requestTimes = new LinkedList<TimeSpan>();

    // We can specify a list of time stamp pairs using the optional timeranges parameter
    String timeRangesStr = req.getParameter("timeranges");
    if (timeRangesStr != null) {
        boolean continueWithRequest = parseTimeRanges(resp, pvName, requestTimes, timeRangesStr);
        if (!continueWithRequest) {
            // Cannot parse the time ranges properly, so we abort the request.
            return;
        }

        // Override the start and the end so that the mergededup consumer works correctly.
        start = requestTimes.getFirst().getStartTime();
        end = requestTimes.getLast().getEndTime();

    } else {
        requestTimes.add(new TimeSpan(start, end));
    }

    assert (requestTimes.size() > 0);

    String postProcessorUserArg = req.getParameter("pp");
    if (pvName.contains("(")) {
        if (!pvName.contains(")")) {
            logger.error("Unbalanced paran " + pvName);
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST);
            return;
        }
        String[] components = pvName.split("[(,)]");
        postProcessorUserArg = components[0];
        pvName = components[1];
        if (components.length > 2) {
            for (int i = 2; i < components.length; i++) {
                postProcessorUserArg = postProcessorUserArg + "_" + components[i];
            }
        }
        logger.info("After parsing the function call syntax pvName is " + pvName
                + " and postProcessorUserArg is " + postProcessorUserArg);
    }

    PostProcessor postProcessor = PostProcessors.findPostProcessor(postProcessorUserArg);

    PVTypeInfo typeInfo = PVNames.determineAppropriatePVTypeInfo(pvName, configService);
    pmansProfiler.mark("After PVTypeInfo");

    if (typeInfo == null && RetrievalState.includeExternalServers(req)) {
        logger.debug("Checking to see if pv " + pvName + " is served by a external Archiver Server");
        typeInfo = checkIfPVisServedByExternalServer(pvName, start, req, resp, useChunkedEncoding);
    }

    if (typeInfo == null) {
        if (resp.isCommitted()) {
            logger.debug("Proxied the data thru an external server for PV " + pvName);
            return;
        }
    }

    if (typeInfo == null) {
        if (retiredPVTemplate != null) {
            PVTypeInfo templateTypeInfo = PVNames.determineAppropriatePVTypeInfo(retiredPVTemplate,
                    configService);
            if (templateTypeInfo != null) {
                typeInfo = new PVTypeInfo(pvName, templateTypeInfo);
                typeInfo.setPaused(true);
                typeInfo.setApplianceIdentity(configService.getMyApplianceInfo().getIdentity());
                // Somehow tell the code downstream that this is a fake typeInfo.
                typeInfo.setSamplingMethod(SamplingMethod.DONT_ARCHIVE);
                logger.debug("Using a template PV for " + pvName + " Need to determine the actual DBR type.");
                setActualDBRTypeFromData(pvName, typeInfo, configService);
            }
        }
    }

    if (typeInfo == null) {
        logger.error("Unable to find typeinfo for pv " + pvName);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_NOT_FOUND);
        return;
    }

    if (postProcessor == null) {
        if (useReduced) {
            String defaultPPClassName = configService.getInstallationProperties().getProperty(
                    "org.epics.archiverappliance.retrieval.DefaultUseReducedPostProcessor",
                    FirstSamplePP.class.getName());
            logger.debug("Using the default usereduced preprocessor " + defaultPPClassName);
            try {
                postProcessor = (PostProcessor) Class.forName(defaultPPClassName).newInstance();
            } catch (Exception ex) {
                logger.error("Exception constructing new instance of post processor " + defaultPPClassName, ex);
                postProcessor = null;
            }
        }
    }

    if (postProcessor == null) {
        logger.debug("Using the default raw preprocessor");
        postProcessor = new DefaultRawPostProcessor();
    }

    ApplianceInfo applianceForPV = configService.getApplianceForPV(pvName);
    if (applianceForPV == null) {
        // TypeInfo cannot be null here...
        assert (typeInfo != null);
        applianceForPV = configService.getAppliance(typeInfo.getApplianceIdentity());
    }

    if (!applianceForPV.equals(configService.getMyApplianceInfo())) {
        // Data for pv is elsewhere. Proxy/redirect and return.
        proxyRetrievalRequest(req, resp, pvName, useChunkedEncoding,
                applianceForPV.getRetrievalURL() + "/../data");
        return;
    }

    pmansProfiler.mark("After Appliance Info");

    String pvNameFromRequest = pvName;

    String fieldName = PVNames.getFieldName(pvName);
    if (fieldName != null && !fieldName.equals("") && !pvName.equals(typeInfo.getPvName())) {
        logger.debug("We reset the pvName " + pvName + " to one from the typeinfo " + typeInfo.getPvName()
                + " as that determines the name of the stream. Also using ExtraFieldsPostProcessor");
        pvName = typeInfo.getPvName();
        postProcessor = new ExtraFieldsPostProcessor(fieldName);
    }

    try {
        // Postprocessors get their mandatory arguments from the request.
        // If user does not pass in the expected request, throw an exception.
        postProcessor.initialize(postProcessorUserArg, pvName);
    } catch (Exception ex) {
        logger.error("Postprocessor threw an exception during initialization for " + pvName, ex);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_NOT_FOUND);
        return;
    }

    try (BasicContext retrievalContext = new BasicContext(typeInfo.getDBRType(), pvNameFromRequest);
            MergeDedupConsumer mergeDedupCountingConsumer = createMergeDedupConsumer(resp, extension,
                    useChunkedEncoding);
            RetrievalExecutorResult executorResult = determineExecutorForPostProcessing(pvName, typeInfo,
                    requestTimes, req, postProcessor)) {
        HashMap<String, String> engineMetadata = null;
        if (fetchLatestMetadata) {
            // Make a call to the engine to fetch the latest metadata.
            engineMetadata = fetchLatestMedataFromEngine(pvName, applianceForPV);
        }

        LinkedList<Future<RetrievalResult>> retrievalResultFutures = resolveAllDataSources(pvName, typeInfo,
                postProcessor, applianceForPV, retrievalContext, executorResult, req, resp);
        pmansProfiler.mark("After data source resolution");

        long s1 = System.currentTimeMillis();
        String currentlyProcessingPV = null;

        List<Future<EventStream>> eventStreamFutures = getEventStreamFuturesFromRetrievalResults(executorResult,
                retrievalResultFutures);

        logger.debug(
                "Done with the RetrievalResult's; moving onto the individual event stream from each source for "
                        + pvName);
        pmansProfiler.mark("After retrieval results");

        for (Future<EventStream> future : eventStreamFutures) {
            EventStreamDesc sourceDesc = null;
            try (EventStream eventStream = future.get()) {
                sourceDesc = null; // Reset it for each loop iteration.
                sourceDesc = eventStream.getDescription();
                if (sourceDesc == null) {
                    logger.warn("Skipping event stream without a desc for pv " + pvName);
                    continue;
                }

                logger.debug("Processing event stream for pv " + pvName + " from source "
                        + ((eventStream.getDescription() != null) ? eventStream.getDescription().getSource()
                                : " unknown"));

                try {
                    mergeTypeInfo(typeInfo, sourceDesc, engineMetadata);
                } catch (MismatchedDBRTypeException mex) {
                    logger.error(mex.getMessage(), mex);
                    continue;
                }

                if (currentlyProcessingPV == null || !currentlyProcessingPV.equals(pvName)) {
                    logger.debug("Switching to new PV " + pvName
                            + " In some mime responses we insert special headers at the beginning of the response. Calling the hook for that");
                    currentlyProcessingPV = pvName;
                    mergeDedupCountingConsumer.processingPV(currentlyProcessingPV, start, end,
                            (eventStream != null) ? sourceDesc : null);
                }

                try {
                    // If the postProcessor does not have a consolidated event stream, we send each eventstream across as we encounter it.
                    // Else we send the consolidatedEventStream down below.
                    if (!(postProcessor instanceof PostProcessorWithConsolidatedEventStream)) {
                        mergeDedupCountingConsumer.consumeEventStream(eventStream);
                        resp.flushBuffer();
                    }
                } catch (Exception ex) {
                    if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
                        // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
                        logger.debug(
                                "Exception when consuming and flushing data from " + sourceDesc.getSource(),
                                ex);
                    } else {
                        logger.error("Exception when consuming and flushing data from " + sourceDesc.getSource()
                                + "-->" + ex.toString(), ex);
                    }
                }
                pmansProfiler.mark("After event stream " + eventStream.getDescription().getSource());
            } catch (Exception ex) {
                if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
                    // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
                    logger.debug("Exception when consuming and flushing data from "
                            + (sourceDesc != null ? sourceDesc.getSource() : "N/A"), ex);
                } else {
                    logger.error("Exception when consuming and flushing data from "
                            + (sourceDesc != null ? sourceDesc.getSource() : "N/A") + "-->" + ex.toString(),
                            ex);
                }
            }
        }

        if (postProcessor instanceof PostProcessorWithConsolidatedEventStream) {
            try (EventStream eventStream = ((PostProcessorWithConsolidatedEventStream) postProcessor)
                    .getConsolidatedEventStream()) {
                EventStreamDesc sourceDesc = eventStream.getDescription();
                if (sourceDesc == null) {
                    logger.error("Skipping event stream without a desc for pv " + pvName
                            + " and post processor " + postProcessor.getExtension());
                } else {
                    mergeDedupCountingConsumer.consumeEventStream(eventStream);
                    resp.flushBuffer();
                }
            }
        }

        // If the postProcessor needs to send final data across, give it a chance now...
        if (postProcessor instanceof AfterAllStreams) {
            EventStream finalEventStream = ((AfterAllStreams) postProcessor).anyFinalData();
            if (finalEventStream != null) {
                mergeDedupCountingConsumer.consumeEventStream(finalEventStream);
                resp.flushBuffer();
            }
        }

        pmansProfiler.mark("After writing all eventstreams to response");

        long s2 = System.currentTimeMillis();
        logger.info("For the complete request, found a total of "
                + mergeDedupCountingConsumer.totalEventsForAllPVs + " in " + (s2 - s1) + "(ms)" + " skipping "
                + mergeDedupCountingConsumer.skippedEventsForAllPVs + " events" + " deduping involved "
                + mergeDedupCountingConsumer.comparedEventsForAllPVs + " compares.");
    } catch (Exception ex) {
        if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
            // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
            logger.debug("Exception when retrieving data ", ex);
        } else {
            logger.error("Exception when retrieving data " + "-->" + ex.toString(), ex);
        }
    }
    pmansProfiler.mark("After all closes and flushing all buffers");

    // Until we determine all the conditions under which to log this, we log sparingly.
    if (pmansProfiler.totalTimeMS() > 5000) {
        logger.error("Retrieval time for " + pvName + " from " + startTimeStr + " to " + endTimeStr
                + pmansProfiler.toString());
    }
}
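
The servlet parses its from/to parameters with the project's own TimeUtils; a rough JDK-only equivalent for the documented ISO form "2011-02-02T08:00:00.000Z" could look like this (the class name is ours):

import java.sql.Timestamp;
import java.time.Instant;

public class IsoToTimestamp {
    public static void main(String[] args) {
        // Instant.parse handles ISO-8601 instants such as 2011-02-02T08:00:00.000Z.
        Instant instant = Instant.parse("2011-02-02T08:00:00.000Z");
        Timestamp ts = Timestamp.from(instant);
        System.out.println(ts); // printed via Timestamp.toString(), in the local time zone
    }
}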

From source file:org.epics.archiverappliance.retrieval.DataRetrievalServlet.java

private void doGetMultiPV(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {

    PoorMansProfiler pmansProfiler = new PoorMansProfiler();

    // Gets the list of PVs specified by the `pv` parameter
    // String arrays might be inefficient for retrieval. In any case, they are sorted, which is essential later on.
    List<String> pvNames = Arrays.asList(req.getParameterValues("pv"));

    // Ensuring that the AA has finished starting up before requests are accepted.
    if (configService.getStartupState() != STARTUP_SEQUENCE.STARTUP_COMPLETE) {
        String msg = "Cannot process data retrieval requests for specified PVs ("
                + StringUtils.join(pvNames, ", ") + ") until the appliance has completely started up.";
        logger.error(msg);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, msg);
        return;
    }

    // Getting various fields from arguments
    String startTimeStr = req.getParameter("from");
    String endTimeStr = req.getParameter("to");
    boolean useReduced = false;
    String useReducedStr = req.getParameter("usereduced");
    if (useReducedStr != null && !useReducedStr.equals("")) {
        try {
            useReduced = Boolean.parseBoolean(useReducedStr);
        } catch (Exception ex) {
            logger.error("Exception parsing usereduced", ex);
            useReduced = false;
        }
    }

    // Getting MIME type
    String extension = req.getPathInfo().split("\\.")[1];
    logger.info("Mime is " + extension);

    if (!extension.equals("json") && !extension.equals("raw") && !extension.equals("jplot")
            && !extension.equals("qw")) {
        String msg = "Mime type " + extension + " is not supported. Please use \"json\", \"jplot\" or \"raw\".";
        resp.setHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
        return;
    }

    boolean useChunkedEncoding = true;
    String doNotChunkStr = req.getParameter("donotchunk");
    if (doNotChunkStr != null && !doNotChunkStr.equals("false")) {
        logger.info("Turning off HTTP chunked encoding");
        useChunkedEncoding = false;
    }

    boolean fetchLatestMetadata = false;
    String fetchLatestMetadataStr = req.getParameter("fetchLatestMetadata");
    if (fetchLatestMetadataStr != null && fetchLatestMetadataStr.equals("true")) {
        logger.info("Adding a call to the engine to fetch the latest metadata");
        fetchLatestMetadata = true;
    }

    // For data retrieval we need PV info. However, for PVs that have long since retired, we may not want to keep PVTypeInfos in the system.
    // So, we support a template PV that lays out the data sources.
    // During retrieval, you can pass in the PV as a template and we'll clone this and make a temporary copy.
    String retiredPVTemplate = req.getParameter("retiredPVTemplate");

    // Go through the given PVs and return a bad request error if any are null.
    int nullPVs = 0;
    for (String pvName : pvNames) {
        if (pvName == null) {
            nullPVs++;
        }
        if (nullPVs > 0) {
            logger.warn("Some PVs are null in the request.");
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST);
            return;
        }
    }

    if (pvNames.toString().matches("^.*" + ARCH_APPL_PING_PV + ".*$")) {
        logger.debug("Processing ping PV - this is used to validate the connection with the client.");
        processPingPV(req, resp);
        return;
    }

    // Reassigning the loop variable would not update the list, so write back with set().
    for (int i = 0; i < pvNames.size(); i++) {
        String pvName = pvNames.get(i);
        if (pvName.endsWith(".VAL")) {
            pvName = pvName.substring(0, pvName.length() - 4);
            pvNames.set(i, pvName);
            logger.info("Removing .VAL from pvName for request giving " + pvName);
        }
    }

    // ISO datetimes are of the form "2011-02-02T08:00:00.000Z"
    Timestamp end = TimeUtils.plusHours(TimeUtils.now(), 1);
    if (endTimeStr != null) {
        try {
            end = TimeUtils.convertFromISO8601String(endTimeStr);
        } catch (IllegalArgumentException ex) {
            try {
                end = TimeUtils.convertFromDateTimeStringWithOffset(endTimeStr);
            } catch (IllegalArgumentException ex2) {
                String msg = "Cannot parse time " + endTimeStr;
                logger.warn(msg, ex2);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
        }
    }

    // We get one day by default
    Timestamp start = TimeUtils.minusDays(end, 1);
    if (startTimeStr != null) {
        try {
            start = TimeUtils.convertFromISO8601String(startTimeStr);
        } catch (IllegalArgumentException ex) {
            try {
                start = TimeUtils.convertFromDateTimeStringWithOffset(startTimeStr);
            } catch (IllegalArgumentException ex2) {
                String msg = "Cannot parse time " + startTimeStr;
                logger.warn(msg, ex2);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
        }
    }

    if (end.before(start)) {
        String msg = "For request, end " + end.toString() + " is before start " + start.toString() + " for pvs "
                + StringUtils.join(pvNames, ", ");
        logger.error(msg);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
        return;
    }

    LinkedList<TimeSpan> requestTimes = new LinkedList<TimeSpan>();

    // We can specify a list of time stamp pairs using the optional timeranges parameter
    String timeRangesStr = req.getParameter("timeranges");
    if (timeRangesStr != null) {
        boolean continueWithRequest = parseTimeRanges(resp, "[" + StringUtils.join(pvNames, ", ") + "]",
                requestTimes, timeRangesStr);
        if (!continueWithRequest) {
            // Cannot parse the time ranges properly, so we abort the request.
            String msg = "The specified time ranges could not be processed appropriately. Aborting.";
            logger.info(msg);
            resp.setHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
            return;
        }

        // Override the start and the end so that the mergededup consumer works correctly.
        start = requestTimes.getFirst().getStartTime();
        end = requestTimes.getLast().getEndTime();

    } else {
        requestTimes.add(new TimeSpan(start, end));
    }

    assert (requestTimes.size() > 0);

    // Get a post processor for each PV specified in pvNames
    // If PV in the form <pp>(<pv>), process it
    String postProcessorUserArg = req.getParameter("pp");
    List<String> postProcessorUserArgs = new ArrayList<>(pvNames.size());
    List<PostProcessor> postProcessors = new ArrayList<>(pvNames.size());
    for (int i = 0; i < pvNames.size(); i++) {
        postProcessorUserArgs.add(postProcessorUserArg);

        if (pvNames.get(i).contains("(")) {
            if (!pvNames.get(i).contains(")")) {
                String msg = "Unbalanced paren " + pvNames.get(i);
                logger.error(msg);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return;
            }
            String[] components = pvNames.get(i).split("[(,)]");
            postProcessorUserArg = components[0];
            postProcessorUserArgs.set(i, postProcessorUserArg);
            pvNames.set(i, components[1]);
            if (components.length > 2) {
                for (int j = 2; j < components.length; j++) {
                    postProcessorUserArgs.set(i, postProcessorUserArgs.get(i) + "_" + components[j]);
                }
            }
            logger.info("After parsing the function call syntax pvName is " + pvNames.get(i)
                    + " and postProcessorUserArg is " + postProcessorUserArg);
        }
        postProcessors.add(PostProcessors.findPostProcessor(postProcessorUserArg));
    }

    List<PVTypeInfo> typeInfos = new ArrayList<PVTypeInfo>(pvNames.size());
    for (int i = 0; i < pvNames.size(); i++) {
        typeInfos.add(PVNames.determineAppropriatePVTypeInfo(pvNames.get(i), configService));
    }
    pmansProfiler.mark("After PVTypeInfo");

    for (int i = 0; i < pvNames.size(); i++)
        if (typeInfos.get(i) == null && RetrievalState.includeExternalServers(req)) {
            logger.debug(
                    "Checking to see if pv " + pvNames.get(i) + " is served by a external Archiver Server");
            typeInfos.set(i,
                    checkIfPVisServedByExternalServer(pvNames.get(i), start, req, resp, useChunkedEncoding));
        }

    for (int i = 0; i < pvNames.size(); i++) {
        if (typeInfos.get(i) == null) {
            // TODO Only needed if we're forwarding the request to another server.
            if (resp.isCommitted()) {
                logger.debug("Proxied the data thru an external server for PV " + pvNames.get(i));
                return;
            }

            if (retiredPVTemplate != null) {
                PVTypeInfo templateTypeInfo = PVNames.determineAppropriatePVTypeInfo(retiredPVTemplate,
                        configService);
                if (templateTypeInfo != null) {
                    typeInfos.set(i, new PVTypeInfo(pvNames.get(i), templateTypeInfo));
                    typeInfos.get(i).setPaused(true);
                    typeInfos.get(i).setApplianceIdentity(configService.getMyApplianceInfo().getIdentity());
                    // Somehow tell the code downstream that this is a fake typeInfos.
                    typeInfos.get(i).setSamplingMethod(SamplingMethod.DONT_ARCHIVE);
                    logger.debug("Using a template PV for " + pvNames.get(i)
                            + " Need to determine the actual DBR type.");
                    setActualDBRTypeFromData(pvNames.get(i), typeInfos.get(i), configService);
                }
            }
        }

        if (typeInfos.get(i) == null) {
            String msg = "Unable to find typeinfo for pv " + pvNames.get(i);
            logger.error(msg);
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_NOT_FOUND, msg);
            return;
        }

        if (postProcessors.get(i) == null) {
            if (useReduced) {
                String defaultPPClassName = configService.getInstallationProperties().getProperty(
                        "org.epics.archiverappliance.retrieval.DefaultUseReducedPostProcessor",
                        FirstSamplePP.class.getName());
                logger.debug("Using the default usereduced preprocessor " + defaultPPClassName);
                try {
                    postProcessors.set(i, (PostProcessor) Class.forName(defaultPPClassName).newInstance());
                } catch (Exception ex) {
                    logger.error("Exception constructing new instance of post processor " + defaultPPClassName,
                            ex);
                    postProcessors.set(i, null);
                }
            }
        }

        if (postProcessors.get(i) == null) {
            logger.debug("Using the default raw preprocessor");
            postProcessors.set(i, new DefaultRawPostProcessor());
        }
    }

    // Get the appliances for each of the PVs
    List<ApplianceInfo> applianceForPVs = new ArrayList<ApplianceInfo>(pvNames.size());
    for (int i = 0; i < pvNames.size(); i++) {
        applianceForPVs.add(configService.getApplianceForPV(pvNames.get(i)));
        if (applianceForPVs.get(i) == null) {
            // TypeInfo cannot be null here...
            assert (typeInfos.get(i) != null);
            applianceForPVs.set(i, configService.getAppliance(typeInfos.get(i).getApplianceIdentity()));
        }
    }

    /*
     * Retrieve the external appliances for PVs that are not assigned to the current appliance, and
     * store the associated PV information per appliance.
     */
    Map<String, ArrayList<PVInfoForClusterRetrieval>> applianceToPVs = new HashMap<String, ArrayList<PVInfoForClusterRetrieval>>();
    for (int i = 0; i < pvNames.size(); i++) {
        if (!applianceForPVs.get(i).equals(configService.getMyApplianceInfo())) {

            ArrayList<PVInfoForClusterRetrieval> appliancePVs = applianceToPVs
                    .get(applianceForPVs.get(i).getRetrievalURL());
            appliancePVs = (appliancePVs == null) ? new ArrayList<>() : appliancePVs;
            PVInfoForClusterRetrieval pvInfoForRetrieval = new PVInfoForClusterRetrieval(pvNames.get(i),
                    typeInfos.get(i), postProcessors.get(i), applianceForPVs.get(i));
            appliancePVs.add(pvInfoForRetrieval);
            applianceToPVs.put(applianceForPVs.get(i).getRetrievalURL(), appliancePVs);
        }
    }

    List<List<Future<EventStream>>> listOfEventStreamFuturesLists = new ArrayList<List<Future<EventStream>>>();
    Set<String> retrievalURLs = applianceToPVs.keySet();
    if (retrievalURLs.size() > 0) {
        // Redirect the PVs for each foreign appliance to be retrieved there.
        for (String retrievalURL : retrievalURLs) {
            // Get the list of PVs for this appliance.
            ArrayList<PVInfoForClusterRetrieval> pvInfos = applianceToPVs.get(retrievalURL);
            try {
                List<List<Future<EventStream>>> resultFromForeignAppliances = retrieveEventStreamFromForeignAppliance(
                        req, resp, pvInfos, requestTimes, useChunkedEncoding,
                        retrievalURL + "/../data/getDataForPVs.raw", start, end);
                listOfEventStreamFuturesLists.addAll(resultFromForeignAppliances);
            } catch (Exception ex) {
                logger.error("Failed to retrieve " + StringUtils.join(pvNames, ", ") + " from " + retrievalURL
                        + ".");
                return;
            }
        }
    }

    pmansProfiler.mark("After Appliance Info");

    // Setting post processor for PVs, taking into account whether there is a field in the PV name
    List<String> pvNamesFromRequests = new ArrayList<String>(pvNames.size());
    for (int i = 0; i < pvNames.size(); i++) {
        String pvName = pvNames.get(i);
        pvNamesFromRequests.add(pvName);
        PVTypeInfo typeInfo = typeInfos.get(i);
        postProcessorUserArg = postProcessorUserArgs.get(i);

        // If a field is specified in a PV name, it will create a post processor for that
        String fieldName = PVNames.getFieldName(pvName);
        if (fieldName != null && !fieldName.equals("") && !pvName.equals(typeInfo.getPvName())) {
            logger.debug("We reset the pvName " + pvName + " to one from the typeinfo " + typeInfo.getPvName()
                    + " as that determines the name of the stream. " + "Also using ExtraFieldsPostProcessor.");
            pvNames.set(i, typeInfo.getPvName());
            postProcessors.set(i, new ExtraFieldsPostProcessor(fieldName));
        }

        try {
            // Postprocessors get their mandatory arguments from the request.
            // If user does not pass in the expected request, throw an exception.
            postProcessors.get(i).initialize(postProcessorUserArg, pvName);
        } catch (Exception ex) {
            String msg = "Postprocessor threw an exception during initialization for " + pvName;
            logger.error(msg, ex);
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_NOT_FOUND, msg);
            return;
        }
    }

    /*
     * MergeDedupConsumer is what writes PB data in its respective format to the HTTP response.
     * The response, after the MergeDedupConsumer is created, contains the following:
     * 
     * 1) The content type for the response.
     * 2) Any additional headers for the particular MIME response.
     * 
     * Additionally, the MergeDedupConsumer instance holds a reference to the output stream
     * that is used to write to the HTTP response. It is stored under the name `os`.
     */
    MergeDedupConsumer mergeDedupCountingConsumer;
    try {
        mergeDedupCountingConsumer = createMergeDedupConsumer(resp, extension, useChunkedEncoding);
    } catch (ServletException se) {
        String msg = "Exception when retrieving data " + "-->" + se.toString();
        logger.error(msg, se);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, msg);
        return;
    }

    /* 
     * BasicContext contains the PV name and the expected return type. Used to access PB files.
     * RetrievalExecutorResult contains a thread service class and the time spans. Presumably, the 
     * thread service is what retrieves the data, and the BasicContext is the context in which it 
     * works.
     */
    List<HashMap<String, String>> engineMetadatas = new ArrayList<HashMap<String, String>>();
    try {
        List<BasicContext> retrievalContexts = new ArrayList<BasicContext>(pvNames.size());
        List<RetrievalExecutorResult> executorResults = new ArrayList<RetrievalExecutorResult>(pvNames.size());
        for (int i = 0; i < pvNames.size(); i++) {
            if (fetchLatestMetadata) {
                // Make a call to the engine to fetch the latest metadata.
                engineMetadatas.add(fetchLatestMedataFromEngine(pvNames.get(i), applianceForPVs.get(i)));
            }
            retrievalContexts.add(new BasicContext(typeInfos.get(i).getDBRType(), pvNamesFromRequests.get(i)));
            executorResults.add(determineExecutorForPostProcessing(pvNames.get(i), typeInfos.get(i),
                    requestTimes, req, postProcessors.get(i)));
        }

        /*
         * There are as many Future objects in the eventStreamFutures List as there are periods over 
         * which to fetch data. Retrieval of data happens here in parallel.
         */
        List<LinkedList<Future<RetrievalResult>>> listOfRetrievalResultFuturesLists = new ArrayList<LinkedList<Future<RetrievalResult>>>();
        for (int i = 0; i < pvNames.size(); i++) {
            listOfRetrievalResultFuturesLists.add(resolveAllDataSources(pvNames.get(i), typeInfos.get(i),
                    postProcessors.get(i), applianceForPVs.get(i), retrievalContexts.get(i),
                    executorResults.get(i), req, resp));
        }
        pmansProfiler.mark("After data source resolution");

        for (int i = 0; i < pvNames.size(); i++) {
            // Data is retrieved here
            List<Future<EventStream>> eventStreamFutures = getEventStreamFuturesFromRetrievalResults(
                    executorResults.get(i), listOfRetrievalResultFuturesLists.get(i));
            listOfEventStreamFuturesLists.add(eventStreamFutures);
        }

    } catch (Exception ex) {
        if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
            // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
            logger.debug("Exception when retrieving data ", ex);
        } else {
            logger.error("Exception when retrieving data " + "-->" + ex.toString(), ex);
        }
    }

    long s1 = System.currentTimeMillis();
    String currentlyProcessingPV = null;

    /*
     * The following try block goes through each of the streams in the list of event stream futures.
     * 
     * It is intended that the process goes through one PV at a time.
     */
    try {
        for (int i = 0; i < pvNames.size(); i++) {
            List<Future<EventStream>> eventStreamFutures = listOfEventStreamFuturesLists.get(i);
            String pvName = pvNames.get(i);
            PVTypeInfo typeInfo = typeInfos.get(i);
            HashMap<String, String> engineMetadata = fetchLatestMetadata ? engineMetadatas.get(i) : null;
            PostProcessor postProcessor = postProcessors.get(i);

            logger.debug("Done with the RetrievalResults; moving onto the individual event stream "
                    + "from each source for " + StringUtils.join(pvNames, ", "));
            pmansProfiler.mark("After retrieval results");
            for (Future<EventStream> future : eventStreamFutures) {
                EventStreamDesc sourceDesc = null;

                // Gets the result of a data retrieval
                try (EventStream eventStream = future.get()) {
                    sourceDesc = eventStream.getDescription();
                    if (sourceDesc == null) {
                        logger.warn("Skipping event stream without a desc for pv " + pvName);
                        continue;
                    }

                    logger.debug("Processing event stream for pv " + pvName + " from source "
                            + ((eventStream.getDescription() != null) ? eventStream.getDescription().getSource()
                                    : " unknown"));

                    try {
                        mergeTypeInfo(typeInfo, sourceDesc, engineMetadata);
                    } catch (MismatchedDBRTypeException mex) {
                        logger.error(mex.getMessage(), mex);
                        continue;
                    }

                    if (currentlyProcessingPV == null || !currentlyProcessingPV.equals(pvName)) {
                        logger.debug("Switching to new PV " + pvName + " In some mime responses we insert "
                                + "special headers at the beginning of the response. Calling the hook for "
                                + "that");
                        currentlyProcessingPV = pvName;
                        /*
                         * Goes through the PB data stream over a period of time. The relevant MIME response
                         * actually deals with the processing of the PV. `start` and `end` refer to the very
                         * beginning and very end of the time period being retrieved over, regardless of
                         * whether it is divided up or not.
                         */
                        mergeDedupCountingConsumer.processingPV(currentlyProcessingPV, start, end,
                                (eventStream != null) ? sourceDesc : null);
                    }

                    try {
                        // If the postProcessor does not have a consolidated event stream, we send each eventstream across as we encounter it.
                        // Else we send the consolidatedEventStream down below.
                        if (!(postProcessor instanceof PostProcessorWithConsolidatedEventStream)) {
                            /*
                             * The eventStream object contains all the data over the current period.
                             */
                            mergeDedupCountingConsumer.consumeEventStream(eventStream);
                            resp.flushBuffer();
                        }
                    } catch (Exception ex) {
                        if (ex != null && ex.toString() != null
                                && ex.toString().contains("ClientAbortException")) {
                            // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
                            logger.debug(
                                    "Exception when consuming and flushing data from " + sourceDesc.getSource(),
                                    ex);
                        } else {
                            logger.error("Exception when consuming and flushing data from "
                                    + sourceDesc.getSource() + "-->" + ex.toString(), ex);
                        }
                    }
                    pmansProfiler.mark("After event stream " + eventStream.getDescription().getSource());
                } catch (Exception ex) {
                    if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
                        // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
                        logger.debug("Exception when consuming and flushing data from "
                                + (sourceDesc != null ? sourceDesc.getSource() : "N/A"), ex);
                    } else {
                        logger.error("Exception when consuming and flushing data from "
                                + (sourceDesc != null ? sourceDesc.getSource() : "N/A") + "-->" + ex.toString(),
                                ex);
                    }
                }
            }

            // TODO Go through data from other appliances here

            if (postProcessor instanceof PostProcessorWithConsolidatedEventStream) {
                try (EventStream eventStream = ((PostProcessorWithConsolidatedEventStream) postProcessor)
                        .getConsolidatedEventStream()) {
                    EventStreamDesc sourceDesc = eventStream.getDescription();
                    if (sourceDesc == null) {
                        logger.error("Skipping event stream without a desc for pv " + pvName
                                + " and post processor " + postProcessor.getExtension());
                    } else {
                        mergeDedupCountingConsumer.consumeEventStream(eventStream);
                        resp.flushBuffer();
                    }
                }
            }

            // If the postProcessor needs to send final data across, give it a chance now...
            if (postProcessor instanceof AfterAllStreams) {
                EventStream finalEventStream = ((AfterAllStreams) postProcessor).anyFinalData();
                if (finalEventStream != null) {
                    mergeDedupCountingConsumer.consumeEventStream(finalEventStream);
                    resp.flushBuffer();
                }
            }

            pmansProfiler.mark("After writing all eventstreams to response");
        }
    } catch (Exception ex) {
        if (ex != null && ex.toString() != null && ex.toString().contains("ClientAbortException")) {
            // We check for ClientAbortException etc this way to avoid including tomcat jars in the build path.
            logger.debug("Exception when retrieving data ", ex);
        } else {
            logger.error("Exception when retrieving data " + "-->" + ex.toString(), ex);
        }
    }

    long s2 = System.currentTimeMillis();
    logger.info("For the complete request, found a total of " + mergeDedupCountingConsumer.totalEventsForAllPVs
            + " in " + (s2 - s1) + "(ms)" + " skipping " + mergeDedupCountingConsumer.skippedEventsForAllPVs
            + " events" + " deduping involved " + mergeDedupCountingConsumer.comparedEventsForAllPVs
            + " compares.");

    pmansProfiler.mark("After all closes and flushing all buffers");

    // Until we determine all the conditions under which to log this, we log sparingly.
    if (pmansProfiler.totalTimeMS() > 5000) {
        logger.error("Retrieval time for " + StringUtils.join(pvNames, ", ") + " from " + startTimeStr + " to "
                + endTimeStr + ": " + pmansProfiler.toString());
    }

    mergeDedupCountingConsumer.close();
}
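
For quick reference, here is a minimal, self-contained sketch of the Timestamp handling the servlet above relies on: parsing ISO-8601 time parameters into java.sql.Timestamp and logging the retrieval window via toString(), which renders the JDBC timestamp escape format. The parameter values and class name are illustrative, not part of the servlet.

import java.sql.Timestamp;
import java.time.Instant;

public class RetrievalWindowSketch {
    public static void main(String[] args) {
        // Hypothetical "from"/"to" request parameter values in ISO-8601 (UTC).
        String from = "2024-01-15T08:00:00Z";
        String to = "2024-01-15T09:00:00Z";

        // Timestamp.from(Instant) is available since Java 8.
        Timestamp start = Timestamp.from(Instant.parse(from));
        Timestamp end = Timestamp.from(Instant.parse(to));

        // toString() renders the JDBC timestamp escape format:
        // yyyy-mm-dd hh:mm:ss.fffffffff, in the default time zone.
        System.out.println("Retrieving from " + start.toString() + " to " + end.toString());
    }
}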

From source file:gov.nih.nci.cadsrapi.dao.orm.CleanerDAO.java

public void clean() {
    if (sessionFactory == null) {
        setFactory();
        this.setSessionFactory(sessionFactory);
    }
    Session session = getSession();
    Transaction tx = session.beginTransaction();

    //Check for unexpired carts that have been active within the expiration interval and reset expiration time.
    int expirationInterval = 4 * 24 * 60; //Four days, in minutes
    int sleepTime = 60; //One hour, in minutes

    //Defaults (note: despite the "Days" suffix in the names, these values are in minutes)
    int publicEmptyExpirationDays = 4 * 24 * 60; //Four days, in minutes
    String emptyExpirationSQL = " (SYSDATE + INTERVAL '" + publicEmptyExpirationDays + "' MINUTE)";

    int publicFullExpirationDays = 30 * 24 * 60; //30 days, in minutes
    String fullExpirationSQL = " (SYSDATE + INTERVAL '" + publicFullExpirationDays + "' MINUTE)";

    try {
        int temp = Integer.valueOf(PropertiesLoader.getProperty("cart.time.expiration.minutes"));
        expirationInterval = temp;
    } catch (Exception e) {
        log.error(e);
    }

    try {
        int temp = Integer.valueOf(PropertiesLoader.getProperty("cart.cleaner.sleep.minutes"));
        sleepTime = temp;
    } catch (Exception e) {
        log.error(e);
    }

    try {
        int temp = Integer.valueOf(PropertiesLoader.getProperty("cart.public.empty.expiration.minutes"));
        publicEmptyExpirationDays = temp;
    } catch (Exception e) {
        log.error(e);
    }

    try {
        int temp = Integer.valueOf(PropertiesLoader.getProperty("cart.public.full.expiration.minutes"));
        publicFullExpirationDays = temp;
    } catch (Exception e) {
        log.error(e);
    }

    //Timestamps are in milliseconds
    Timestamp now = new Timestamp(System.currentTimeMillis());
    Timestamp nowMinusTwiceSleep = new Timestamp(now.getTime() - sleepTime * 60 * 1000 * 2); //Converting minutes to milliseconds
    Timestamp nowPlusExpirationInterval = new Timestamp(now.getTime() + expirationInterval * 60 * 1000); //Converting minutes to milliseconds

    Query updateActiveCarts = session.createQuery(
            "update gov.nih.nci.cadsr.objectcart.domain.Cart set expirationDate = :nowPlusExpirationInterval"
                    + " where (lastWriteDate > :nowMinusTwiceSleep or lastReadDate > :nowMinusTwiceSleep) and expirationDate > :now and expirationDate < :nowPlusExpirationInterval");

    updateActiveCarts.setTimestamp("nowPlusExpirationInterval", nowPlusExpirationInterval);
    updateActiveCarts.setTimestamp("nowMinusTwiceSleep", nowMinusTwiceSleep);
    updateActiveCarts.setTimestamp("now", now);

    if (publicEmptyExpirationDays > 0 && publicEmptyExpirationDays < 365 * 24 * 60) //Check expiration is within a year
        emptyExpirationSQL = " (SYSDATE + INTERVAL '" + publicEmptyExpirationDays + "' MINUTE)";
    else if (publicEmptyExpirationDays == 0)
        emptyExpirationSQL = "SYSDATE";

    if (publicFullExpirationDays > 0 && publicFullExpirationDays < 365) //Check expiration is within a year
        fullExpirationSQL = " (SYSDATE + INTERVAL '" + publicFullExpirationDays + "' MINUTE)";
    else if (publicFullExpirationDays == 0)
        fullExpirationSQL = "SYSDATE";

    //Set expiration date to emptyExpirationSQL if the user starts with 'PublicUser' and the current expiration date is null
    String initializeSessionCartSql = "UPDATE cart c" + " set expiration_Date = " + emptyExpirationSQL
            + " where" + " (c.user_Id like 'PublicUser%') and " + " (c.expiration_Date is null)";

    Query initPublicCarts = session.createSQLQuery(initializeSessionCartSql);

    //Set expiration date to fullExpiration if the user starts with 'PublicUser', the cart has been active (read or written to) in the last day and the cart has items
    //String nonEmptyCartSql = "UPDATE cart c left join cart_object co on c.id = co.cart_id " +
    //" set expiration_Date = "+fullExpirationSQL+" where" +
    //" (c.user_Id like 'PublicUser%') and " +
    //" (c.last_write_date > DATE_SUB(SYSDATE, INTERVAL "+ (sleepTime * 2)+" MINUTE) OR c.last_read_date > DATE_SUB(SYSDATE, INTERVAL "+ (sleepTime * 2) +" MINUTE)) and" +
    //" (co.id is not null)";

    String nonEmptyCartSql = "UPDATE cart c  " + " set expiration_Date = " + fullExpirationSQL + " where"
            + " (c.user_Id like 'PublicUser%') and " + " (c.last_write_date > (SYSDATE - INTERVAL '"
            + (sleepTime * 2) + "' MINUTE) OR c.last_read_date > (SYSDATE - INTERVAL '" + (sleepTime * 2)
            + "' MINUTE)) and"
            + " EXISTS (select id from cart_object co where co.id is not null and co.cart_id = c.id)";

    Query expNonEmptyPublicCarts = session.createSQLQuery(nonEmptyCartSql);

    //Now delete expired carts (carts where expiration date is in the past)
    //REQUIRES ON-DELETE Cascade support in underlying database on the 
    //CartObject cart_id FK constraint
    Query deleteCartQuery = session.createQuery(
            "delete from gov.nih.nci.cadsr.objectcart.domain.Cart " + "where expirationDate <=:now");

    deleteCartQuery.setTimestamp("now", now);

    try {
        int resetResults = updateActiveCarts.executeUpdate();
        if (resetResults > 0)
            log.debug("Reset expiration date for " + resetResults + " active carts");
        /* GF 28500 */
        int expResults = initPublicCarts.executeUpdate();
        if (expResults > 0)
            log.debug("Expiration date initialized for " + expResults + " empty PublicUser carts");
        int expNEPCResults = expNonEmptyPublicCarts.executeUpdate();
        if (expNEPCResults > 0)
            log.debug("Expiration date extended for " + expNEPCResults + " non-empty PublicUser carts");
        /* GF 28500 */

        int results = deleteCartQuery.executeUpdate();
        if (results > 0)
            log.debug("Deleted " + results + " carts at " + now.toString());

    } catch (JDBCException ex) {
        log.error("JDBC Exception in ORMDAOImpl ", ex);
        ex.printStackTrace();

    } catch (org.hibernate.HibernateException hbmEx) {
        log.error(hbmEx.getMessage());
        hbmEx.printStackTrace();
    } catch (Exception e) {
        log.error("Exception ", e);
        e.printStackTrace();
    } finally {
        try {
            tx.commit();
            session.close();
        } catch (Exception eSession) {
            log.error("Could not close the session - " + eSession.getMessage());
            eSession.printStackTrace();
        }
    }
}
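
The expiration arithmetic above converts minute intervals to milliseconds by hand; a minimal sketch of the same bookkeeping, with illustrative values, using TimeUnit for the conversion and toString() for display:

import java.sql.Timestamp;
import java.util.concurrent.TimeUnit;

public class CartExpirySketch {
    public static void main(String[] args) {
        // Illustrative values only; the real ones come from PropertiesLoader.
        int sleepTimeMinutes = 60;
        int expirationIntervalMinutes = 4 * 24 * 60; // four days, in minutes

        Timestamp now = new Timestamp(System.currentTimeMillis());
        // TimeUnit does the minutes-to-milliseconds conversion explicitly.
        Timestamp nowMinusTwiceSleep = new Timestamp(
                now.getTime() - TimeUnit.MINUTES.toMillis(sleepTimeMinutes * 2L));
        Timestamp nowPlusExpiration = new Timestamp(
                now.getTime() + TimeUnit.MINUTES.toMillis(expirationIntervalMinutes));

        // toString() prints the JDBC escape format: yyyy-mm-dd hh:mm:ss.fffffffff
        System.out.println("now                = " + now.toString());
        System.out.println("nowMinusTwiceSleep = " + nowMinusTwiceSleep.toString());
        System.out.println("nowPlusExpiration  = " + nowPlusExpiration.toString());
    }
}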

From source file:org.openanzo.jdbc.container.sql.TransactionSQL.java

/**
 * Runs the insertTransaction prepared statement.
  * <code>//from  w w w.j  av  a2 s  . c o  m
 *        INSERT INTO {0} (CREATED) VALUES (?)    
 * </code>
 *
 *@param stmtProvider
 *         factory and cache of PreparedStatments
 *@param connection
 *          connection to underlying database
 *
 *@param created the CREATED timestamp bound into the insert statement
 *
 *@param transactionTableName template parameter
 *@return  the number of rows inserted
 *@throws  org.openanzo.jdbc.utils.RdbException
 */
public static int insertTransaction(final org.openanzo.jdbc.utils.PreparedStatementProvider stmtProvider,
        final java.sql.Connection connection, java.sql.Timestamp created, String transactionTableName)
        throws org.openanzo.jdbc.utils.RdbException {
    java.sql.PreparedStatement ps = null;
    //long startTimer=System.currentTimeMillis();
    try {
        ps = stmtProvider.getPreparedSQLStatement(insertTransaction, new String[] { transactionTableName },
                connection);
        int argc = 1;
        if (created == null) {
            throw new org.openanzo.jdbc.utils.RdbException(
                    org.openanzo.exceptions.ExceptionConstants.RDB.NULL_PARAMETER, "created",
                    "insertTransaction");
        } else {
            ps.setTimestamp(argc++, created);
        }
        int counter = 0;
        try {
            counter = ps.executeUpdate();
        } catch (java.sql.SQLException sqle) {
            if (sqle.getErrorCode() == 1205) { // 1205: lock wait timeout (e.g. MySQL); retried below
                int retries = 0;
                while (retries < 5) {
                    try {
                        Thread.sleep(5000);
                    } catch (InterruptedException ie) {
                        throw sqle;
                    }
                    try {
                        counter = ps.executeUpdate();
                        break;
                    } catch (java.sql.SQLException sqleInner) {
                        if (sqleInner.getErrorCode() == 1205) {
                            retries++;
                        } else {
                            throw sqleInner;
                        }
                    }
                }
                if (retries >= 5) {
                    throw sqle;
                }
            } else {
                throw sqle;
            }
        }
        return counter;

    } catch (java.sql.SQLException e) {
        throw new org.openanzo.jdbc.utils.RdbException(
                org.openanzo.exceptions.ExceptionConstants.RDB.FAILED_EXECUTING_SQL, e, "insertTransaction",
                stmtProvider.getSqlString(insertTransaction),
                "" + "created=" + ((created != null) ? created.toString() : "null"),
                "" + "transactionTableName="
                        + ((transactionTableName != null) ? transactionTableName.toString() : "null"));
    } finally {
        if (ps != null) {
            try {
                ps.close();
            } catch (java.sql.SQLException sqle) {
                if (log.isDebugEnabled())
                    log.debug(org.openanzo.exceptions.LogUtils.RDB_MARKER, "Error closing prepared statement",
                            sqle);
            }
        }
        //long endtimer=(System.currentTimeMillis()-startTimer);
        //if(endtimer>CUTOFF)System.out.println("[insertTransaction]"+endtimer);
    }
}
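
The error path above formats a possibly-null created timestamp with the (created != null) ? created.toString() : "null" idiom; a tiny sketch of that pattern in isolation (the helper name is ours, not part of the openanzo API):

import java.sql.Timestamp;

public class NullSafeTimestampSketch {
    // Hypothetical helper: renders a possibly-null Timestamp for log or error text.
    static String describe(Timestamp ts) {
        return (ts != null) ? ts.toString() : "null";
    }

    public static void main(String[] args) {
        System.out.println("created=" + describe(new Timestamp(System.currentTimeMillis())));
        System.out.println("created=" + describe(null)); // prints created=null
    }
}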

From source file:gov.nih.nci.cadsr.sentinel.database.DBAlertOracle.java

/**
 * Pull all Permissible Values changed in the date range specified.
 *
 * @param dates_
 *        The date comparison index.
 * @param start_
 *        The date to start.
 * @param end_
 *        The date to end.
 * @param creators_
 *        The list of desired creator user ids.
 * @param modifiers_
 *        The list of desired modifier user ids.
 * @return 0 if successful, otherwise the database error code.
 */
public ACData[] selectPV(int dates_, Timestamp start_, Timestamp end_, String creators_[],
        String modifiers_[]) {
    // There's always one that doesn't fit the pattern. Any changes to
    // selectBuild() must also be checked here for consistency.

    String start = "to_date('" + start_.toString().substring(0, 10) + "', 'yyyy/mm/dd')";
    String end = "to_date('" + end_.toString().substring(0, 10) + "', 'yyyy/mm/dd')";

    String select = "select 'p', 1, 'pv', zz.pv_idseq as id, '', -1, zz.value, '', "
            + "zz.date_modified, zz.date_created, zz.modified_by, zz.created_by, '', "
            + "'', '', ach.changed_column, ach.old_value, ach.new_value, ach.change_datetimestamp, ach.changed_table, ach.changed_by "
            + "from sbrext.ac_change_history_ext ach, sbr.permissible_values_view zz ";

    select = select + "where ach.change_datetimestamp >= " + start + " and ach.change_datetimestamp < " + end
            + " ";
    if (modifiers_ != null && modifiers_.length > 0 && modifiers_[0].charAt(0) != '(')
        select = select + "AND ach.changed_by in " + selectIN(modifiers_);
    select = select + whereACH(_ACTYPE_PV) + "AND zz.pv_idseq = ach.ac_idseq ";
    if (creators_ != null && creators_.length > 0 && creators_[0].charAt(0) != '(')
        select = select + "AND zz.created_by in " + selectIN(creators_);

    if (dates_ == _DATECONLY)
        select = select + "AND zz.date_created >= " + start + " and zz.date_created < " + end + " ";
    else if (dates_ == _DATEMONLY)
        select = select + "AND zz.date_modified is not NULL ";

    select = select + _orderbyACH;

    return selectAC(select);
}
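
The query above splices toString().substring(0, 10) into a to_date(...) literal, which works because the escape format begins yyyy-mm-dd. An alternative sketch, under the assumption that a plain JDBC connection is available, binds the window with setTimestamp instead; the helper and column selection are illustrative:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Timestamp;

public class DateWindowBindSketch {
    // Illustrative only: binds the change window as parameters instead of
    // concatenating to_date('yyyy-mm-dd', ...) literals into the SQL text.
    static PreparedStatement changeWindowQuery(Connection conn, Timestamp start, Timestamp end)
            throws SQLException {
        String sql = "select * from sbrext.ac_change_history_ext"
                + " where change_datetimestamp >= ? and change_datetimestamp < ?";
        PreparedStatement ps = conn.prepareStatement(sql);
        ps.setTimestamp(1, start);
        ps.setTimestamp(2, end);
        return ps;
    }
}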

From source file:com.davidmascharka.lips.TrackerActivity.java

/**
 * When a new WiFi scan comes in, get sensor values and predict position
 */
private void updateScanResults() {
    resetWifiReadings();

    scanResults = wifiManager.getScanResults();

    // Start another scan to recalculate user position
    wifiManager.startScan();

    time = new Timestamp(System.currentTimeMillis());

    for (ScanResult result : scanResults) {
        if (wifiReadings.get(result.BSSID) != null) {
            wifiReadings.put(result.BSSID, result.level);
        } // else BSSID wasn't programmed in
    }
    //@author Mahesh Gaya added permission if-statement
    if (ActivityCompat.checkSelfPermission(this,
            android.Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED
            || ActivityCompat.checkSelfPermission(this,
                    android.Manifest.permission.ACCESS_WIFI_STATE) != PackageManager.PERMISSION_GRANTED
            || ActivityCompat.checkSelfPermission(this,
                    android.Manifest.permission.CHANGE_WIFI_STATE) != PackageManager.PERMISSION_GRANTED
            || ActivityCompat.checkSelfPermission(this,
                    android.Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
        Log.i(TAG, "Permissions have NOT been granted. Requesting permissions.");
        requestMyPermissions();
    } else {
        Log.i(TAG, "Permissions have already been granted. Getting last known location from GPS and Network");
        if (location == null) {
            location = locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
        }
        if (location == null) {
            location = locationManager.getLastKnownLocation(LocationManager.NETWORK_PROVIDER);
        }
    }
    setInstanceValues();

    printValues();

    // this is where the magic happens
    // TODO clean up
    if (!t.isAlive()) {
        t = new Thread(new Runnable() {
            public void run() {
                Timestamp myTime = time;
                // This doesn't do anything -> classifierXRBFRegressor is null -> not loaded
                /*try {
                   predictedX = (float) classifierXRBFRegressor.classifyInstance(xInstances.get(0));
                } catch (Exception e) {
                   e.printStackTrace();
                }
                // Likewise, doesn't happen
                try {
                   predictedY = (float) classifierYRBFRegressor.classifyInstance(yInstances.get(0));
                } catch (Exception e) {
                   e.printStackTrace();
                }*/

                // Get the partition that the new instance is in
                // Use the classifier of the predicted partition to predict an x and y value for
                // the new instance if the classifier is loaded (not null)
                try {
                    predictedPartition = partitionClassifier.classifyInstance(partitionInstances.get(0));
                    //double[] dist = partitionClassifier.distributionForInstance(partitionInstances.get(0)); // gets the probability distribution for the instance
                } catch (Exception e) {
                    e.printStackTrace();
                }

                String partitionString = partitionInstances.classAttribute().value((int) predictedPartition);
                if (partitionString.equals("upperleft")) {
                    if (partitionUpperLeftX != null) {
                        try {
                            predictedX = (float) partitionUpperLeftX.classifyInstance(xInstances.get(0));
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                    if (partitionUpperLeftY != null) {
                        try {
                            predictedY = (float) partitionUpperLeftY.classifyInstance(yInstances.get(0));
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                } else if (partitionString.equals("upperright")) {
                    if (partitionUpperRightX != null) {
                        try {
                            predictedX = (float) partitionUpperRightX.classifyInstance(xInstances.get(0));
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                    if (partitionUpperRightY != null) {
                        try {
                            predictedY = (float) partitionUpperRightY.classifyInstance(yInstances.get(0));
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                } else if (partitionString.equals("lowerleft")) {
                    if (partitionLowerLeftX != null) {
                        try {
                            predictedX = (float) partitionLowerLeftX.classifyInstance(xInstances.get(0));
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                    if (partitionLowerLeftY != null) {
                        try {
                            predictedY = (float) partitionLowerLeftY.classifyInstance(yInstances.get(0));
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                } else if (partitionString.equals("lowerright")) {
                    if (partitionLowerRightX != null) {
                        try {
                            predictedX = (float) partitionLowerRightX.classifyInstance(xInstances.get(0));
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                    if (partitionLowerRightY != null) {
                        try {
                            predictedY = (float) partitionLowerRightY.classifyInstance(yInstances.get(0));
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                } else if (partitionString.equals("middle")) {
                    if (partitionMiddleX != null) {
                        try {
                            predictedX = (float) partitionMiddleX.classifyInstance(xInstances.get(0));
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                    if (partitionMiddleY != null) {
                        try {
                            predictedY = (float) partitionMiddleY.classifyInstance(yInstances.get(0));
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                }

                xText.post(new Runnable() {
                    public void run() {
                        xText.setText("X Position: " + predictedX);
                    }
                });

                yText.post(new Runnable() {
                    public void run() {
                        yText.setText("Y Position: " + predictedY);
                    }
                });

                // TODO: make this work -> grid is apparently null here. For whatever reason.
                /*runOnUiThread(new Runnable() {
                   public void run() {
                      grid.setUserPointCoords(predictedX, predictedY);
                   }
                });*/

                // Unnecessary if you're not testing
                writer.print("(" + predictedX + "," + predictedY + ")");
                writer.print(" %" + myTime.toString() + "\t " + time.toString() + "\t"
                        + new Timestamp(System.currentTimeMillis()) + "\n");
                writer.flush();
            }
        });
        t.setPriority(Thread.MIN_PRIORITY); // run in the background
        t.start();
    }
}
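
The writer lines above record three timestamps per prediction; a compact sketch of the same timing bookkeeping, using getTime() for the elapsed delta and toString() for the log text (the file name is illustrative):

import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.sql.Timestamp;

public class ScanTimingSketch {
    public static void main(String[] args) throws FileNotFoundException {
        Timestamp scanTime = new Timestamp(System.currentTimeMillis());
        // ... classification work would happen here ...
        Timestamp doneTime = new Timestamp(System.currentTimeMillis());

        // getTime() gives milliseconds since the epoch; toString() gives the escape format.
        try (PrintWriter writer = new PrintWriter("timing.log")) {
            writer.println("scan=" + scanTime.toString() + " done=" + doneTime.toString()
                    + " elapsedMs=" + (doneTime.getTime() - scanTime.getTime()));
        }
    }
}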