Example usage for java.sql ResultSet isLast

Introduction

On this page you can find example usages of java.sql ResultSet isLast.

Prototype

boolean isLast() throws SQLException;

Document

Retrieves whether the cursor is on the last row of this ResultSet object.
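
Before the full examples below, here is a minimal, hypothetical sketch of how isLast() is typically used inside a standard next() loop, for example to flush buffered values once the cursor reaches the final row. The class name IsLastSketch, the table example_table, and the column name are illustrative assumptions, not taken from the examples on this page. Note that, per the isLast() documentation, support is optional for TYPE_FORWARD_ONLY result sets and the call may be expensive because the driver might need to fetch ahead one row.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class IsLastSketch {

    // Prints all names on one comma-separated line, using isLast() to
    // decide when the cursor is on the final row (hypothetical table/column).
    public static void printNames(Connection connection) throws SQLException {
        try (Statement stmt = connection.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT name FROM example_table")) {
            StringBuilder line = new StringBuilder();
            while (rs.next()) {
                line.append(rs.getString("name"));
                if (rs.isLast()) {
                    System.out.println(line); // last row reached: emit the buffer
                } else {
                    line.append(", ");
                }
            }
        }
    }
}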

Usage

From source file:unikn.dbis.univis.visualization.graph.VGraph.java

/**
 * @param dimension  the dimension whose data is charted
 * @param result     the given result set from the SQL action
 * @param testResult the result set holding the expected category names
 * @throws SQLException if reading from one of the result sets fails
 */
public void fillChartData(VDimension dimension, ResultSet result, ResultSet testResult) throws SQLException {

    layout.setAlignment(SwingConstants.CENTER);

    ResultSetMetaData data = result.getMetaData();
    int idPos = data.getColumnCount();
    int namePos = idPos - 1;
    int bufferPos = namePos - 1;

    List<String> testList = new ArrayList<String>();

    while (testResult.next()) {
        testList.add(testResult.getString(1));
    }

    List<String> helpList = new ArrayList<String>(testList);

    if (root == null) {

        cellHistory.historize();

        if (ChartType.BAR_CHART_VERTICAL.equals(chartType) || ChartType.BAR_CHART_HORIZONTAL.equals(chartType)
                || ChartType.AREA_CHART.equals(chartType)) {
            dataset = new DefaultCategoryDataset();

            while (result.next()) {
                ((DefaultCategoryDataset) dataset).addValue(result.getInt(1), result.getString(namePos + 1),
                        "");
            }
        } else {
            dataset = new DefaultPieDataset();

            while (result.next()) {
                ((DefaultPieDataset) dataset).setValue(result.getString(namePos + 1), result.getInt(1));
            }
        }

        root = createVertex(rootHeadLine, "");
        root.setCellId("root");
        cache.insert(root);
        cellHistory.add(root);
    } else {
        cellHistory.historize();

        String buffer = "";
        if (ChartType.BAR_CHART_VERTICAL.equals(chartType) || ChartType.BAR_CHART_HORIZONTAL.equals(chartType)
                || ChartType.AREA_CHART.equals(chartType)) {
            while (result.next()) {

                String currentValue = result.getString(idPos);

                if (!buffer.equals(currentValue)) {

                    if (!result.isFirst()) {
                        if (!helpList.isEmpty()) {
                            for (String missing : helpList) {
                                ((DefaultCategoryDataset) dataset).addValue(0, missing, "");
                            }
                        }
                    }

                    dataset = new DefaultCategoryDataset();
                    VGraphCell nextCell = createVertex(
                            MessageResolver.getMessage("data_reference." + dimension.getI18nKey()) + " ("
                                    + result.getString(bufferPos) + ")",
                            result.getString(idPos));
                    createEdges(nextCell, result.getString(idPos));
                    cache.insert(nextCell);
                    cellHistory.add(nextCell);
                    helpList = new ArrayList<String>(testList);
                }

                for (String available : testList) {
                    if (result.getString(namePos).equals(available)) {
                        helpList.remove(available);
                    }
                }

                ((DefaultCategoryDataset) dataset).addValue(result.getInt(1), result.getString(namePos), "");
                buffer = currentValue;

                if (result.isLast()) {
                    if (!helpList.isEmpty()) {
                        for (String missing : helpList) {
                            ((DefaultCategoryDataset) dataset).addValue(0, missing, "");
                        }
                    }
                }
            }

        } else {
            while (result.next()) {

                String currentValue = result.getString(idPos);

                LOG.info(result.getString(2));

                if (!buffer.equals(currentValue)) {

                    dataset = new DefaultPieDataset();

                    VGraphCell nextCell = createVertex(
                            MessageResolver.getMessage("data_reference." + dimension.getI18nKey()) + " ("
                                    + result.getString(bufferPos) + ")",
                            result.getString(idPos));
                    createEdges(nextCell, result.getString(idPos));
                    cache.insert(nextCell);
                    cellHistory.add(nextCell);
                }

                ((DefaultPieDataset) dataset).setValue(result.getString(namePos), result.getInt(1));

                buffer = currentValue;
            }
        }
    }
    layout.run(facade);
    facade.setOrdered(true);
    Map nested = facade.createNestedMap(true, true);
    cache.edit(nested);
}

From source file:org.intermine.bio.dataconversion.ModEncodeMetaDataProcessor.java

/**
 * ==========================
 *    EXPERIMENTAL FACTORS
 * ==========================
 */
private void processEFactor(Connection connection) throws SQLException, ObjectStoreException {
    long bT = System.currentTimeMillis(); // to monitor time spent in the process

    ResultSet res = getEFactors(connection);
    int count = 0;
    int prevRank = -1;
    int prevSub = -1;
    ExperimentalFactor ef = null;
    String name = null;

    while (res.next()) {
        Integer submissionId = new Integer(res.getInt("experiment_id"));
        if (deletedSubMap.containsKey(submissionId)) {
            continue;
        }

        Integer rank = new Integer(res.getInt("rank"));
        String value = res.getString("value");

        // the data is alternating between EF types and names, in order.
        if (submissionId != prevSub) {
            // except for the first record, this is a new EF object
            if (!res.isFirst()) {
                submissionEFMap.put(prevSub, ef);
                LOG.info("EF MAP: " + dccIdMap.get(prevSub) + "|" + ef.efNames);
                LOG.info("EF MAP types: " + rank + "|" + ef.efTypes);
            }
            ef = new ExperimentalFactor();
        }
        if (rank != prevRank || submissionId != prevSub) {
            // this is a name
            if (getPreferredSynonym(value) != null) {
                value = getPreferredSynonym(value);
            }
            ef.efNames.add(value);
            name = value;
            count++;
        } else {
            // this is a type
            ef.efTypes.put(name, value);
            name = null;
            if (res.isLast()) {
                submissionEFMap.put(submissionId, ef);
                LOG.debug("EF MAP last: " + submissionId + "|" + rank + "|" + ef.efNames);
            }
        }
        prevRank = rank;
        prevSub = submissionId;
    }
    res.close();
    LOG.info("created " + count + " experimental factors");
    LOG.info("PROCESS TIME experimental factors: " + (System.currentTimeMillis() - bT) + " ms");
}

From source file:org.intermine.bio.dataconversion.ModEncodeMetaDataProcessor.java

/**
 * =====================
 *    DATA ATTRIBUTES
 * =====================
 */
private void processAppliedDataAttributesNEW(Connection connection) throws SQLException, ObjectStoreException {
    // attempts to collate attributes
    // TODO check!
    long bT = System.currentTimeMillis(); // to monitor time spent in the process

    ResultSet res = getAppliedDataAttributes(connection);
    int count = 0;
    Integer previousDataId = 0;
    String previousName = null;
    String value = null;
    String type = null;
    while (res.next()) {
        Integer dataId = new Integer(res.getInt("data_id"));
        // check if not belonging to a deleted sub
        // better way?
        Integer submissionId = dataSubmissionMap.get(dataId);
        if (submissionId == null || deletedSubMap.containsKey(submissionId)) {
            continue;
        }
        String name = res.getString("heading");

        //            LOG.info("DA " + dataId + ": " + name);

        if (previousDataId == 0) { //first pass
            value = res.getString("value");
            type = res.getString("name");
            previousDataId = dataId;
            previousName = name;
            LOG.info("DA0 " + dataId + ": " + name + "|" + value);
            continue;
        }

        if (dataId > previousDataId) {
            Item dataAttribute = storeDataAttribute(value, type, previousDataId, previousName);
            value = res.getString("value");
            type = res.getString("name");
            count++;
            previousDataId = dataId;
            previousName = name;
            LOG.info("DA1 new: " + previousDataId + ": " + previousName + "|" + value);
            continue;
        }

        if (!name.equalsIgnoreCase(previousName)) {
            Item dataAttribute = storeDataAttribute(value, type, dataId, previousName);
            //                LOG.info("DA2 store: " + dataId + ": " + previousName + "|" + value);
            count++;
            value = res.getString("value");
            previousName = name;
            LOG.info("DA2 new: " + dataId + ": " + previousName + "|" + value);
        } else {
            value = value + ", " + res.getString("value");
        }
        type = res.getString("name");

        previousDataId = dataId;
        if (res.isLast()) {
            Item dataAttribute = storeDataAttribute(value, type, dataId, name);
            count++;
        }
    }
    LOG.info("created " + count + " data attributes");
    res.close();
    LOG.info("PROCESS TIME data attributes: " + (System.currentTimeMillis() - bT) + " ms");
}

From source file:org.intermine.bio.dataconversion.ModEncodeMetaDataProcessor.java

/**
 *
 * ====================
 *         DAG
 * ====================
 *
 * In chado, Applied protocols in a submission are linked to each other via
 * the flow of data (output of a parent AP are input to a child AP).
 * The method processes the data from chado to build the objects
 * (SubmissionDetails, AppliedProtocol, AppliedData) and their
 * respective maps to chado identifiers needed to traverse the DAG.
 * It then traverses the DAG, assigning the experiment_id to all data.
 *
 * @param connection
 * @throws SQLException
 * @throws ObjectStoreException
 */

private void processDag(Connection connection) throws SQLException, ObjectStoreException {
    long bT = System.currentTimeMillis(); // to monitor time spent in the process

    ResultSet res = getDAG(connection);
    AppliedProtocol node = new AppliedProtocol();
    AppliedData branch = null;
    Integer count = new Integer(0);
    Integer actualSubmissionId = new Integer(0); // to store the experiment id (see below)
    Integer previousAppliedProtocolId = new Integer(0);
    boolean isADeletedSub = false;

    while (res.next()) {
        Integer submissionId = new Integer(res.getInt("experiment_id"));
        Integer protocolId = new Integer(res.getInt("protocol_id"));
        Integer appliedProtocolId = new Integer(res.getInt("applied_protocol_id"));
        Integer dataId = new Integer(res.getInt("data_id"));
        String direction = res.getString("direction");

        LOG.debug("DAG: " + submissionId + " p:" + protocolId + " ap:" + appliedProtocolId + " d:" + dataId
                + " | " + direction);

        // the results are ordered; the first applied protocols have a subId.
        // if we find a deleted sub, we know that subsequent records with a null
        // subId belong to the deleted sub.
        // note that while the subId is null in the database, it is 0 here
        if (submissionId == 0) {
            if (isADeletedSub) {
                LOG.debug("DEL: skipping" + isADeletedSub);
                continue;
            }
        } else {
            if (deletedSubMap.containsKey(submissionId)) {
                isADeletedSub = true;
                LOG.debug("DEL: " + submissionId + " ->" + isADeletedSub);
                continue;
            } else {
                isADeletedSub = false;
                LOG.debug("DEL: " + submissionId + " ->" + isADeletedSub);
            }
        }

        // build a data node for each iteration
        if (appliedDataMap.containsKey(dataId)) {
            branch = appliedDataMap.get(dataId);
        } else {
            branch = new AppliedData();
        }
        // could use > (order by apid, apdataid, direction)
        // NB: using isLast() is expensive
        if (!appliedProtocolId.equals(previousAppliedProtocolId) || res.isLast()) {
            // the submissionId != null for the first applied protocol
            if (submissionId > 0) {
                firstAppliedProtocols.add(appliedProtocolId);
                LOG.debug("DAG fap subId:" + submissionId + " apID: " + appliedProtocolId);
                // set actual submission id
                // we can either be at a first applied protocol (submissionId > 0)..
                actualSubmissionId = submissionId;
            } else {
                // ..or already down the dag, and we use the stored id.
                submissionId = actualSubmissionId;
            }

            // last one: fill the list of outputs
            // and add to the general list of data ids for the submission,
            // used to fetch features
            if (res.isLast()) {
                if ("output".equalsIgnoreCase(direction)) {
                    node.outputs.add(dataId);
                    mapSubmissionAndData(submissionId, dataId);
                }
            }

            // if it is not the first iteration, let's store it
            if (previousAppliedProtocolId > 0) {
                appliedProtocolMap.put(previousAppliedProtocolId, node);
            }

            // new node
            AppliedProtocol newNode = new AppliedProtocol();
            newNode.protocolId = protocolId;
            newNode.submissionId = submissionId;

            if (direction.startsWith("in")) {
                // add this applied protocol to the list of nextAppliedProtocols
                branch.nextAppliedProtocols.add(appliedProtocolId);
                // ..and update the map
                updateAppliedDataMap(branch, dataId);
                // .. and add the dataId to the list of input Data for this applied protocol
                newNode.inputs.add(dataId);
                mapSubmissionAndData(submissionId, dataId); //***

            } else if (direction.startsWith("out")) {
                // add the dataId to the list of output Data for this applied protocol:
                // it will be used to link to the next set of applied protocols
                newNode.outputs.add(dataId);
                if (previousAppliedProtocolId > 0) {
                    branch.previousAppliedProtocols.add(previousAppliedProtocolId);
                    updateAppliedDataMap(branch, dataId); //***
                    mapSubmissionAndData(submissionId, dataId); //****
                }
            } else {
                // there is some problem with the strings 'input' or 'output'
                throw new IllegalArgumentException(
                        "Data direction not valid for dataId: " + dataId + "|" + direction + "|");
            }
            // for the new round..
            node = newNode;
            previousAppliedProtocolId = appliedProtocolId;

        } else {
            // keep feeding IN and OUT
            if (direction.startsWith("in")) {
                node.inputs.add(dataId);
                if (submissionId > 0) {
                    // initial data
                    mapSubmissionAndData(submissionId, dataId);
                }
                // as above
                branch.nextAppliedProtocols.add(appliedProtocolId);
                updateAppliedDataMap(branch, dataId);
            } else if (direction.startsWith("out")) {
                node.outputs.add(dataId);
                branch.previousAppliedProtocols.add(appliedProtocolId);
                updateAppliedDataMap(branch, dataId); //***
            } else {
                throw new IllegalArgumentException(
                        "Data direction not valid for dataId: " + dataId + "|" + direction + "|");
            }
        }
        count++;
    }
    LOG.info("created " + appliedProtocolMap.size() + "(" + count
            + " applied data points) DAG nodes (= applied protocols) in map");

    res.close();

    // now traverse the DAG, and associate submission with all the applied protocols
    traverseDag();
    // set the dag level as an attribute to applied protocol
    setAppliedProtocolSteps(connection);
    LOG.info("PROCESS TIME DAG: " + (System.currentTimeMillis() - bT) + " ms");
}
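
The last example notes in a comment that calling isLast() is expensive: according to the isLast() documentation, the driver may need to fetch ahead one row to answer, and support is optional for TYPE_FORWARD_ONLY result sets. A common alternative, sketched below with purely hypothetical names (GroupFlushSketch, example_rows, group_id, amount), is to detect group changes inside the loop and perform the final flush once after the loop ends, instead of testing isLast() on every row:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class GroupFlushSketch {

    // Sums values per group without calling isLast(): the current group is
    // flushed whenever the key changes, and once more after the loop ends.
    public static void printGroupTotals(Connection connection) throws SQLException {
        String sql = "SELECT group_id, amount FROM example_rows ORDER BY group_id";
        try (Statement stmt = connection.createStatement();
                ResultSet rs = stmt.executeQuery(sql)) {
            String currentGroup = null;
            long total = 0;
            while (rs.next()) {
                String group = rs.getString("group_id");
                if (currentGroup != null && !currentGroup.equals(group)) {
                    System.out.println(currentGroup + " = " + total); // group changed: flush
                    total = 0;
                }
                currentGroup = group;
                total += rs.getLong("amount");
            }
            if (currentGroup != null) {
                System.out.println(currentGroup + " = " + total); // final flush replaces isLast()
            }
        }
    }
}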