Example usage for java.util.TreeMap.isEmpty()

Introduction

On this page you can find example usage for java.util.TreeMap.isEmpty(), drawn from open-source projects.

Prototype

boolean isEmpty();

Document

Returns true if this map contains no key-value mappings.
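
As a quick reference before the project examples, here is a minimal, self-contained sketch of the call (class and variable names are illustrative):

import java.util.TreeMap;

public class IsEmptyDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> map = new TreeMap<String, Integer>();
        System.out.println(map.isEmpty()); // true: no mappings yet
        map.put("a", 1);
        System.out.println(map.isEmpty()); // false: one mapping present
        map.remove("a");
        System.out.println(map.isEmpty()); // true again
    }
}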

Usage

From source file:org.biomart.configurator.controller.MartController.java

/**
 * @param ss
 * @param fksToBeDropped
 * @param dmd
 * @param schema
 * @param catalog
 * @throws SQLException
 * @throws DataModelException
 */
public void synchroniseKeysUsingDMD(final SourceSchema ss, final Collection<ForeignKey> fksToBeDropped,
        final DatabaseMetaData dmd, final String schema, final String catalog)
        throws SQLException, DataModelException {
    Log.debug("Running DMD key synchronisation");
    // Loop through all the tables in the database, which is the same
    // as looping through all the primary keys.
    Log.debug("Finding tables");
    for (final Iterator<Table> i = ss.getTables().iterator(); i.hasNext();) {

        // Obtain the table and its primary key.
        final SourceTable pkTable = (SourceTable) i.next();
        final PrimaryKey pk = pkTable.getPrimaryKey();
        // Skip all tables which have no primary key.
        if (pk == null)
            continue;

        Log.debug("Processing primary key " + pk);

        // Make a list of relations that already exist in this schema,
        // from some previous run. Any relations that are left in this
        // list by the end of the loop for this table no longer exist in
        // the database, and will be dropped.
        final Collection<Relation> relationsToBeDropped = new TreeSet<Relation>(pk.getRelations()); // Tree for
                                                                                                    // order

        // Identify all foreign keys in the database metadata that refer
        // to the current primary key.
        Log.debug("Finding referring foreign keys");
        String searchCatalog = catalog;
        String searchSchema = schema;
        final ResultSet dbTblFKCols = dmd.getExportedKeys(searchCatalog, searchSchema, pkTable.getName());

        // Loop through the results. There will be one result row per
        // column per key, so we need to build up a set of key columns
        // in a map.
        // The map keys represent the column position within a key. Each
        // map value is a list of columns. In essence the map is a 2-D
        // representation of the foreign keys which refer to this PK,
        // with the keys of the map (Y-axis) representing the column
        // position in the FK, and the values of the map (X-axis)
        // representing each individual FK. In all cases, FK columns are
        // assumed to be in the same order as the PK columns. The map is
        // sorted by key column position.
        // An assumption is made that the query will return columns from
        // the FK in the same order as all other FKs, ie. all column 1s
        // will be returned before any 2s, and then all 2s will be
        // returned
        // in the same order as the 1s they are associated with, etc.
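        // For example, two 2-column FKs (fk1, fk2) referring to this PK
        // would produce a map shaped like (illustrative names):
        //   1 -> [fk1.colA, fk2.colX]
        //   2 -> [fk1.colB, fk2.colY]
        // Reading down a fixed list index j then yields the columns of a
        // single FK, in PK column order.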
        final TreeMap<Short, List<Column>> dbFKs = new TreeMap<Short, List<Column>>();
        while (dbTblFKCols.next()) {
            final String fkTblName = dbTblFKCols.getString("FKTABLE_NAME");
            final String fkColName = dbTblFKCols.getString("FKCOLUMN_NAME");
            final Short fkColSeq = Short.valueOf(dbTblFKCols.getShort("KEY_SEQ"));
            if (fkTblName != null && fkTblName.contains("$")) { // exclude ORACLE's temporary tables (unlikely to be
                                                                // found here though)
                continue;
            }

            // Note the column.
            if (!dbFKs.containsKey(fkColSeq))
                dbFKs.put(fkColSeq, new ArrayList<Column>());
            // In some dbs, FKs can be invalid, so we need to check
            // them.
            final Table fkTbl = ss.getTableByName(fkTblName);
            if (fkTbl != null) {
                final Column fkCol = (Column) fkTbl.getColumnByName(fkColName);
                if (fkCol != null)
                    (dbFKs.get(fkColSeq)).add(fkCol);
            }
        }
        dbTblFKCols.close();

        // Sort foreign keys by name (case insensitive)
        for (List<Column> columnList : dbFKs.values()) {
            Collections.sort(columnList);
        }

        // Only construct FKs if we actually found any.
        if (!dbFKs.isEmpty()) {
            // Identify the sequence of the first column, which may be 0
            // or 1, depending on database implementation.
            final int firstColSeq = dbFKs.firstKey().intValue();

            // How many columns are in the PK?
            final int pkColCount = pkTable.getPrimaryKey().getColumns().size();

            // How many FKs do we have?
            final int fkCount = dbFKs.get(dbFKs.firstKey()).size();

            // Loop through the FKs, and construct each one at a time.
            for (int j = 0; j < fkCount; j++) {
                // Set up an array to hold the FK columns.
                final List<Column> candidateFKColumns = new ArrayList<Column>();

                // For each FK column name, look up the actual column in
                // the table.
                for (final Iterator<Map.Entry<Short, List<Column>>> k = dbFKs.entrySet().iterator(); k
                        .hasNext();) {
                    final Map.Entry<Short, List<Column>> entry = k.next();
                    final Short keySeq = entry.getKey();
                    // Convert the db-specific column index to a
                    // 0-indexed figure for the array of fk columns.
                    // (The sorted TreeMap already iterates positions in
                    // ascending order, so fkColSeq is not consulted below.)
                    final int fkColSeq = keySeq.intValue() - firstColSeq;
                    candidateFKColumns.add(entry.getValue().get(j));
                }

                // Create a template foreign key based around the set
                // of candidate columns we found.
                ForeignKey fkObject;
                try {
                    List<Column> columns = new ArrayList<Column>();
                    for (int k = 0; k < candidateFKColumns.size(); k++) {
                        columns.add(candidateFKColumns.get(k));
                    }
                    fkObject = new ForeignKey(columns);
                    // new KeyController(fkObject);
                } catch (final Throwable t) {
                    throw new BioMartError(t);
                }
                final Table fkTable = fkObject.getTable();

                // If any FK already exists on the target table with the
                // same columns in the same order, then reuse it.
                boolean fkAlreadyExists = false;
                for (final Iterator<ForeignKey> f = fkTable.getForeignKeys().iterator(); f.hasNext()
                        && !fkAlreadyExists;) {
                    final ForeignKey candidateFK = f.next();
                    if (candidateFK.equals(fkObject)) {
                        // Found one. Reuse it!
                        fkObject = candidateFK;
                        // Update the status to indicate that the FK is
                        // backed by the database, if previously it was
                        // handmade.
                        if (fkObject.getStatus().equals(ComponentStatus.HANDMADE))
                            fkObject.setStatus(ComponentStatus.INFERRED);
                        // Remove the FK from the list to be dropped
                        // later, as it definitely exists now.
                        fksToBeDropped.remove(candidateFK);
                        // Flag the key as existing.
                        fkAlreadyExists = true;
                    }
                }

                // Has the key been reused, or is it a new one?
                if (!fkAlreadyExists)
                    try {
                        fkTable.getForeignKeys().add(fkObject);
                        // fkTable.getForeignKeys().add(fk);
                    } catch (final Throwable t) {
                        throw new BioMartError(t);
                    }

                // Work out whether the relation from the FK to
                // the PK should be 1:M or 1:1. The rule is that
                // it will be 1:M in all cases except where the
                // FK table has a PK with identical columns to
                // the FK, in which case it is 1:1, as the FK
                // is unique.
                Cardinality card = Cardinality.MANY_A;
                final PrimaryKey fkPK = fkTable.getPrimaryKey();
                if (fkPK != null && fkObject.getColumns().equals(fkPK.getColumns()))
                    card = Cardinality.ONE;

                // Check to see if it already has a relation.
                boolean relationExists = false;
                for (final Iterator<Relation> f = fkObject.getRelations().iterator(); f.hasNext();) {
                    // Obtain the next relation.
                    final Relation candidateRel = f.next();

                    // a) a relation already exists between the FK
                    // and the PK.
                    if (candidateRel.getOtherKey(fkObject).equals(pk)) {
                        // If cardinality matches, make it
                        // inferred. If doesn't match, make it
                        // modified and update original cardinality.
                        try {
                            if (card.equals(candidateRel.getCardinality())) {
                                if (!candidateRel.getStatus().equals(ComponentStatus.INFERRED_INCORRECT))
                                    candidateRel.setStatus(ComponentStatus.INFERRED);
                            } else {
                                if (!candidateRel.getStatus().equals(ComponentStatus.INFERRED_INCORRECT))
                                    candidateRel.setStatus(ComponentStatus.MODIFIED);
                                candidateRel.setOriginalCardinality(card);
                            }
                        } catch (final AssociationException ae) {
                            throw new BioMartError(ae);
                        }
                        // Don't drop it at the end of the loop.
                        relationsToBeDropped.remove(candidateRel);
                        // Say we've found it.
                        relationExists = true;
                    }

                    // b) a handmade relation exists elsewhere which
                    // should not be dropped. All other relations
                    // elsewhere will be dropped.
                    else if (candidateRel.getStatus().equals(ComponentStatus.HANDMADE))
                        // Don't drop it at the end of the loop.
                        relationsToBeDropped.remove(candidateRel);
                }

                // If relation did not already exist, create it.
                if (!relationExists && !pk.equals(fkObject)) {
                    // Establish the relation.
                    try {
                        new RelationSource(pk, fkObject, card);
                        // pk.getObject().addRelation(relation);
                        // fk.getObject().addRelation(relation);
                    } catch (final Throwable t) {
                        throw new BioMartError(t);
                    }
                }
            }
        }

        // Remove any relations that we didn't find in the database (but
        // leave the handmade ones behind).
        for (final Iterator<Relation> j = relationsToBeDropped.iterator(); j.hasNext();) {
            final Relation r = j.next();
            if (r.getStatus().equals(ComponentStatus.HANDMADE))
                continue;
            r.getFirstKey().removeRelation(r);
            r.getSecondKey().removeRelation(r);
        }
    }
}
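
The !dbFKs.isEmpty() check above is the usual guard before firstKey(): on an empty TreeMap, firstKey() and lastKey() throw NoSuchElementException rather than returning null. A minimal sketch of the pattern, with illustrative names:

import java.util.TreeMap;

public class FirstKeyGuard {
    public static void main(String[] args) {
        TreeMap<Short, String> dbFKs = new TreeMap<Short, String>();
        // Without the guard, dbFKs.firstKey() would throw
        // NoSuchElementException on an empty map.
        if (!dbFKs.isEmpty()) {
            int firstColSeq = dbFKs.firstKey().intValue();
            System.out.println("first column sequence: " + firstColSeq);
        } else {
            System.out.println("no referring foreign keys found");
        }
    }
}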

From source file:net.spfbl.http.ServerHTTP.java

private static String getControlPanel(Locale locale, User user, Long begin, String filter) {
    StringBuilder builder = new StringBuilder();
    if (begin == null && filter == null) {
        //            builder.append("<!DOCTYPE html>\n");
        builder.append("<html lang=\"");
        builder.append(locale.getLanguage());
        builder.append("\">\n");
        builder.append("  <head>\n");
        builder.append("    <meta charset=\"UTF-8\">\n");
        if (locale.getLanguage().toLowerCase().equals("pt")) {
            builder.append("    <title>Painel de controle do SPFBL</title>\n");
        } else {
            builder.append("    <title>SPFBL control panel</title>\n");
        }
        // Styled page.
        builder.append("    <style type=\"text/css\">\n");
        builder.append("      body {\n");
        builder.append("        margin:180px 0px 30px 0px;\n");
        builder.append("        background:lightgray;\n");
        builder.append("      }\n");
        builder.append("      iframe {\n");
        builder.append("        border-width: 0px 0px 0px 0px;\n");
        builder.append("        width:100%;\n");
        builder.append("        height:150px;\n");
        builder.append("      }\n");
        builder.append("      .header {\n");
        builder.append("        background-color:lightgray;\n");
        builder.append("        border-width: 0px 0px 0px 0px;\n");
        builder.append("        position:fixed;\n");
        builder.append("        top:0px;\n");
        builder.append("        margin:auto;\n");
        builder.append("        z-index:1;\n");
        builder.append("        width:100%;\n");
        builder.append("        height:180px;\n");
        builder.append("      }\n");
        builder.append("      .bottom {\n");
        builder.append("        background-color:lightgray;\n");
        builder.append("        border-width: 0px 0px 0px 0px;\n");
        builder.append("        position:fixed;\n");
        builder.append("        bottom:0px;\n");
        builder.append("        margin:auto;\n");
        builder.append("        z-index:1;\n");
        builder.append("        width:100%;\n");
        builder.append("        height:30px;\n");
        builder.append("      }\n");
        builder.append("      .button {\n");
        builder.append("          background-color: #4CAF50;\n");
        builder.append("          border: none;\n");
        builder.append("          color: white;\n");
        builder.append("          padding: 16px 32px;\n");
        builder.append("          text-align: center;\n");
        builder.append("          text-decoration: none;\n");
        builder.append("          display: inline-block;\n");
        builder.append("          font-size: 16px;\n");
        builder.append("          margin: 4px 2px;\n");
        builder.append("          -webkit-transition-duration: 0.4s;\n");
        builder.append("          transition-duration: 0.4s;\n");
        builder.append("          cursor: pointer;\n");
        builder.append("      }\n");
        builder.append("      .sender {\n");
        builder.append("          background-color: white; \n");
        builder.append("          color: black; \n");
        builder.append("          border: 2px solid #008CBA;\n");
        builder.append("          width: 100%;\n");
        builder.append("          word-wrap: break-word;\n");
        builder.append("      }\n");
        builder.append("      .sender:hover {\n");
        builder.append("          background-color: #008CBA;\n");
        builder.append("          color: white;\n");
        builder.append("      }\n");
        builder.append("      .highlight {\n");
        builder.append("        background: #b4b9d2;\n");
        builder.append("        color:black;\n");
        builder.append("        border-top: 1px solid #22262e;\n");
        builder.append("        border-bottom: 1px solid #22262e;\n");
        builder.append("      }\n");
        builder.append("      .highlight:nth-child(odd) td {\n");
        builder.append("        background: #b4b9d2;\n");
        builder.append("      }\n");
        builder.append("      .click {\n");
        builder.append("        cursor:pointer;\n");
        builder.append("        cursor:hand;\n");
        builder.append("      }\n");
        builder.append("      table {\n");
        builder.append("        background: white;\n");
        builder.append("        table-layout:fixed;\n");
        builder.append("        border-collapse: collapse;\n");
        builder.append("        word-wrap:break-word;\n");
        builder.append("        border-radius:3px;\n");
        builder.append("        margin: auto;\n");
        builder.append("        padding:2px;\n");
        builder.append("        width: 100%;\n");
        builder.append("        box-shadow: 0 5px 10px rgba(0, 0, 0, 0.1);\n");
        builder.append("        animation: float 5s infinite;\n");
        builder.append("      }\n");
        builder.append("      th {\n");
        builder.append("        color:#FFFFFF;\n");
        builder.append("        background:#1b1e24;\n");
        builder.append("        border-bottom:4px solid #9ea7af;\n");
        builder.append("        border-right: 0px;\n");
        builder.append("        font-size:16px;\n");
        builder.append("        font-weight: bold;\n");
        builder.append("        padding:4px;\n");
        builder.append("        text-align:left;\n");
        builder.append("        text-shadow: 0 1px 1px rgba(0, 0, 0, 0.1);\n");
        builder.append("        vertical-align:middle;\n");
        builder.append("        height:30px;\n");
        builder.append("      }\n");
        builder.append("      tr {\n");
        builder.append("        border-top: 1px solid #C1C3D1;\n");
        builder.append("        border-bottom: 1px solid #C1C3D1;\n");
        builder.append("        font-size:16px;\n");
        builder.append("        font-weight:normal;\n");
        builder.append("        text-shadow: 0 1px 1px rgba(256, 256, 256, 0.1);\n");
        builder.append("      }\n");
        builder.append("      tr:nth-child(odd) td {\n");
        builder.append("        background:#EBEBEB;\n");
        builder.append("      }\n");
        builder.append("      td {\n");
        builder.append("        padding:2px;\n");
        builder.append("        vertical-align:middle;\n");
        builder.append("        font-size:16px;\n");
        builder.append("        text-shadow: -1px -1px 1px rgba(0, 0, 0, 0.1);\n");
        builder.append("        border-right: 1px solid #C1C3D1;\n");
        builder.append("      }\n");
        builder.append("      input[type=text], select {\n");
        builder.append("        width: 400px;\n");
        builder.append("        padding: 0px 4px;\n");
        builder.append("        margin: 1px 0;\n");
        builder.append("        display: inline-block;\n");
        builder.append("        background: #b4b9d2;\n");
        builder.append("        border: 1px solid #ccc;\n");
        builder.append("        border-radius: 4px;\n");
        builder.append("        box-sizing: border-box;\n");
        builder.append("      }\n");
        builder.append("    </style>\n");
        // JavaScript functions.
        TreeMap<Long, Query> queryMap = user.getQueryMap(null, null);
        builder.append(
                "    <script type=\"text/javascript\" src=\"https://ajax.googleapis.com/ajax/libs/jquery/2.1.3/jquery.min.js\"></script>\n");
        builder.append("    <script type=\"text/javascript\">\n");
        builder.append("      window.onbeforeunload = function () {\n");
        builder.append("        document.getElementById('filterField').value = '';\n");
        builder.append("        window.scrollTo(0, 0);\n");
        builder.append("      }\n");
        builder.append("      var last = ");
        if (queryMap.isEmpty()) {
            builder.append(0);
        } else {
            builder.append(queryMap.lastKey());
        }
        builder.append(";\n");
        builder.append("      var filterText = '';\n");
        builder.append("      function view(query) {\n");
        builder.append("        if (query == undefined || query == 0) {\n");
        builder.append("          var viewer = document.getElementById('viewer');\n");
        builder.append("          viewer.src = 'about:blank';\n");
        builder.append("          last = 0;\n");
        builder.append("        } else if (last != query) {\n");
        builder.append("          var viewer = document.getElementById('viewer');\n");
        builder.append("          viewer.addEventListener('load', function() {\n");
        builder.append("            if (document.getElementById(last)) {\n");
        builder.append("              document.getElementById(last).className = 'tr';\n");
        builder.append("              document.getElementById(last).className = 'click';\n");
        builder.append("            }\n");
        builder.append("            document.getElementById(query).className = 'highlight';\n");
        builder.append("            last = query;\n");
        builder.append("          });\n");
        builder.append("          viewer.src = '");
        builder.append(Core.getURL());
        builder.append("' + query;\n");
        builder.append("        }\n");
        builder.append("      }\n");
        builder.append("      function more(query) {\n");
        builder.append("        var rowMore = document.getElementById('rowMore');\n");
        builder.append("        rowMore.onclick = '';\n");
        builder.append("        rowMore.className = 'tr';\n");
        builder.append("        var columnMore = document.getElementById('columnMore');\n");
        if (locale.getLanguage().toLowerCase().equals("pt")) {
            builder.append("        columnMore.innerHTML = 'carregando mais registros';\n");
        } else {
            builder.append("        columnMore.innerHTML = 'loading more records';\n");
        }
        builder.append("        $.post(\n");
        builder.append("          '");
        builder.append(Core.getURL());
        builder.append(user.getEmail());
        builder.append("',\n");
        builder.append("          {filter:filterText,begin:query},\n");
        builder.append("          function(data, status) {\n");
        builder.append("            if (status == 'success') {\n");
        builder.append("              rowMore.parentNode.removeChild(rowMore);\n");
        builder.append("              $('#tableBody').append(data);\n");
        builder.append("            } else {\n");
        if (locale.getLanguage().toLowerCase().equals("pt")) {
            builder.append(
                    "              alert('Houve uma falha de sistema ao tentar realizar esta operação.');\n");
        } else {
            builder.append(
                    "              alert('There was a system crash while trying to perform this operation.');\n");
        }
        builder.append("            }\n");
        builder.append("          }\n");
        builder.append("        );\n");
        builder.append("      }\n");
        builder.append("      function refresh() {\n");
        builder.append("        filterText = document.getElementById('filterField').value;\n");
        builder.append("        $.post(\n");
        builder.append("          '");
        builder.append(Core.getURL());
        builder.append(user.getEmail());
        builder.append("',\n");
        builder.append("          {filter:filterText},\n");
        builder.append("          function(data, status) {\n");
        builder.append("            if (status == 'success') {\n");
        builder.append("              $('#tableBody').html(data);\n");
        builder.append("              view($('#tableBody tr').attr('id'));\n");
        builder.append("            } else {\n");
        if (locale.getLanguage().toLowerCase().equals("pt")) {
            builder.append(
                    "              alert('Houve uma falha de sistema ao tentar realizar esta operação.');\n");
        } else {
            builder.append(
                    "              alert('There was a system crash while trying to perform this operation.');\n");
        }
        builder.append("            }\n");
        builder.append("          }\n");
        builder.append("        );\n");
        builder.append("      }\n");
        builder.append("    </script>\n");
        builder.append("  </head>\n");
        // Body.
        builder.append("  <body>\n");
        builder.append("    <div class=\"header\">\n");
        if (queryMap.isEmpty()) {
            builder.append("      <iframe id=\"viewer\" src=\"about:blank\"></iframe>\n");
        } else {
            builder.append("      <iframe id=\"viewer\" src=\"");
            builder.append(Core.getURL());
            builder.append(queryMap.lastKey());
            builder.append("\"></iframe>\n");
        }
        // Build the queries table.
        builder.append("      <table>\n");
        builder.append("        <thead>\n");
        builder.append("          <tr>\n");
        if (locale.getLanguage().toLowerCase().equals("pt")) {
            builder.append("            <th style=\"width:120px;\">Recepção</th>\n");
            builder.append("            <th>Origem</th>\n");
            builder.append("            <th>Remetente</th>\n");
            builder.append("            <th>Conteúdo</th>\n");
            builder.append("            <th>Entrega</th>\n");
        } else {
            builder.append("            <th style=\"width:160px;\">Reception</th>\n");
            builder.append("            <th style=\"width:auto;\">Source</th>\n");
            builder.append("            <th style=\"width:auto;\">Sender</th>\n");
            builder.append("            <th style=\"width:auto;\">Content</th>\n");
            builder.append("            <th style=\"width:auto;\">Delivery</th>\n");
        }
        builder.append("          </tr>\n");
        builder.append("        </thead>\n");
        builder.append("      </table>\n");
        builder.append("    </div>\n");
        if (queryMap.isEmpty()) {
            builder.append("    <table>\n");
            builder.append("      <tbody>\n");
            builder.append("        <tr>\n");
            if (locale.getLanguage().toLowerCase().equals("pt")) {
                builder.append(
                        "          <td colspan=\"5\" align=\"center\">nenhum registro encontrado</td>\n");
            } else {
                builder.append("          <td colspan=\"5\" align=\"center\">no records found</td>\n");
            }
            builder.append("        </tr>\n");
            builder.append("      </tbody>\n");
            builder.append("    </table>\n");
        } else {
            DateFormat dateFormat = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.MEDIUM, locale);
            GregorianCalendar calendar = new GregorianCalendar();
            Long nextQuery = null;
            while (queryMap.size() > User.QUERY_MAX_ROWS) {
                nextQuery = queryMap.pollFirstEntry().getKey();
            }
            builder.append("    <table>\n");
            builder.append("      <tbody id=\"tableBody\">\n");
            for (Long time : queryMap.descendingKeySet()) {
                User.Query query = queryMap.get(time);
                boolean highlight = time.equals(queryMap.lastKey());
                buildQueryRow(locale, builder, dateFormat, calendar, time, query, highlight);
            }
            if (nextQuery == null) {
                builder.append("      <tr>\n");
                if (locale.getLanguage().toLowerCase().equals("pt")) {
                    builder.append(
                            "        <td colspan=\"5\" align=\"center\">não foram encontrados outros registros</td>\n");
                } else {
                    builder.append("        <td colspan=\"5\" align=\"center\">no more records found</td>\n");
                }
                builder.append("      </tr>\n");
            } else {
                builder.append("        <tr id=\"rowMore\" class=\"click\" onclick=\"more('");
                builder.append(nextQuery);
                builder.append("')\">\n");
                if (locale.getLanguage().toLowerCase().equals("pt")) {
                    builder.append(
                            "          <td id=\"columnMore\" colspan=\"5\" align=\"center\">clique para ver mais registros</td>\n");
                } else {
                    builder.append(
                            "          <td id=\"columnMore\" colspan=\"5\" align=\"center\">click to see more records</td>\n");
                }
                builder.append("        </tr>\n");
            }
            builder.append("      </tbody>\n");
            builder.append("    </table>\n");
        }
        builder.append("    <div class=\"bottom\">\n");
        builder.append("      <table>\n");
        builder.append("        <tr>\n");
        if (locale.getLanguage().toLowerCase().equals("pt")) {
            builder.append(
                    "          <th>Pesquisar <input type=\"text\" id=\"filterField\" name=\"filterField\" onkeydown=\"if (event.keyCode == 13) refresh();\" autofocus></th>\n");
        } else {
            builder.append(
                    "          <th>Search <input type=\"text\" id=\"filterField\" name=\"filterField\" onkeydown=\"if (event.keyCode == 13) refresh();\" autofocus></th>\n");
        }
        builder.append("          <th style=\"text-align:right;\"><small>");
        builder.append(
                "Powered by <a target=\"_blank\" href=\"http://spfbl.net/\" style=\"color: #b4b9d2;\">SPFBL.net</a></small>");
        builder.append("</th>\n");
        builder.append("        </tr>\n");
        builder.append("      </table>\n");
        builder.append("    </div>\n");
        builder.append("  </body>\n");
        builder.append("</html>\n");
    } else {
        TreeMap<Long, Query> queryMap = user.getQueryMap(begin, filter);
        if (queryMap.isEmpty()) {
            builder.append("        <tr>\n");
            if (locale.getLanguage().toLowerCase().equals("pt")) {
                builder.append(
                        "          <td colspan=\"5\" align=\"center\">nenhum registro encontrado</td>\n");
            } else {
                builder.append("          <td colspan=\"5\" align=\"center\">no records found</td>\n");
            }
            builder.append("        </tr>\n");
        } else {
            DateFormat dateFormat = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.MEDIUM, locale);
            GregorianCalendar calendar = new GregorianCalendar();
            Long nextQuery = null;
            while (queryMap.size() > User.QUERY_MAX_ROWS) {
                nextQuery = queryMap.pollFirstEntry().getKey();
            }
            for (Long time : queryMap.descendingKeySet()) {
                User.Query query = queryMap.get(time);
                buildQueryRow(locale, builder, dateFormat, calendar, time, query, false);
            }
            if (nextQuery == null) {
                builder.append("        <tr>\n");
                if (locale.getLanguage().toLowerCase().equals("pt")) {
                    builder.append(
                            "          <td colspan=\"5\" align=\"center\">não foram encontrados outros registros</td>\n");
                } else {
                    builder.append("          <td colspan=\"5\" align=\"center\">no more records found</td>\n");
                }
                builder.append("        </tr>\n");
            } else {
                builder.append("        <tr id=\"rowMore\" class=\"click\" onclick=\"more('");
                builder.append(nextQuery);
                builder.append("')\">\n");
                if (locale.getLanguage().toLowerCase().equals("pt")) {
                    builder.append(
                            "          <td id=\"columnMore\" colspan=\"5\" align=\"center\">clique para ver mais registros</td>\n");
                } else {
                    builder.append(
                            "          <td id=\"columnMore\" colspan=\"5\" align=\"center\">click to see more records</td>\n");
                }
                builder.append("        </tr>\n");
            }
        }
    }
    return builder.toString();
}
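
In getControlPanel, isEmpty() guards every lastKey() call, and pollFirstEntry() trims the map down to the newest User.QUERY_MAX_ROWS entries before rendering. A reduced sketch of that trim-and-iterate pattern, with an invented MAX_ROWS and map contents:

import java.util.TreeMap;

public class NewestEntries {
    private static final int MAX_ROWS = 3; // stand-in for User.QUERY_MAX_ROWS

    public static void main(String[] args) {
        TreeMap<Long, String> queryMap = new TreeMap<Long, String>();
        for (long t = 1; t <= 5; t++) {
            queryMap.put(t, "query-" + t);
        }
        // Drop the oldest entries until only MAX_ROWS remain; the last
        // polled key is where a "load more" request would resume.
        Long nextQuery = null;
        while (queryMap.size() > MAX_ROWS) {
            nextQuery = queryMap.pollFirstEntry().getKey();
        }
        // isEmpty() guards lastKey(), which throws on an empty map.
        if (!queryMap.isEmpty()) {
            System.out.println("newest: " + queryMap.lastKey());
        }
        // Render newest first.
        for (Long time : queryMap.descendingKeySet()) {
            System.out.println(time + " -> " + queryMap.get(time));
        }
        System.out.println("resume paging at: " + nextQuery);
    }
}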

From source file:org.opendatakit.services.database.utlities.ODKDatabaseImplUtils.java

/**
 * If the caller specified a complex json value for a structured type, flush
 * the value through to the individual columns.
 *
 * @param orderedColumns
 * @param values
 */
private void cleanUpValuesMap(OrderedColumns orderedColumns, Map<String, Object> values) {

    TreeMap<String, String> toBeResolved = new TreeMap<String, String>();

    for (String key : values.keySet()) {
        if (DataTableColumns.CONFLICT_TYPE.equals(key)) {
            continue;
        } else if (DataTableColumns.FILTER_TYPE.equals(key)) {
            continue;
        } else if (DataTableColumns.FILTER_VALUE.equals(key)) {
            continue;
        } else if (DataTableColumns.FORM_ID.equals(key)) {
            continue;
        } else if (DataTableColumns.ID.equals(key)) {
            continue;
        } else if (DataTableColumns.LOCALE.equals(key)) {
            continue;
        } else if (DataTableColumns.ROW_ETAG.equals(key)) {
            continue;
        } else if (DataTableColumns.SAVEPOINT_CREATOR.equals(key)) {
            continue;
        } else if (DataTableColumns.SAVEPOINT_TIMESTAMP.equals(key)) {
            continue;
        } else if (DataTableColumns.SAVEPOINT_TYPE.equals(key)) {
            continue;
        } else if (DataTableColumns.SYNC_STATE.equals(key)) {
            continue;
        } else if (DataTableColumns._ID.equals(key)) {
            continue;
        }
        // OK it is one of the data columns
        ColumnDefinition cp = orderedColumns.find(key);
        if (!cp.isUnitOfRetention()) {
            toBeResolved.put(key, (String) values.get(key));
        }
    }

    // remove these non-retained values from the values set...
    for (String key : toBeResolved.keySet()) {
        values.remove(key);
    }

    while (!toBeResolved.isEmpty()) {

        TreeMap<String, String> moreToResolve = new TreeMap<String, String>();

        for (Map.Entry<String, String> entry : toBeResolved.entrySet()) {
            String key = entry.getKey();
            String json = entry.getValue();
            if (json == null) {
                // don't need to do anything
                // since the value is null
                continue;
            }
            ColumnDefinition cp = orderedColumns.find(key);
            try {
                TypeReference<Map<String, Object>> reference = new TypeReference<Map<String, Object>>() {
                };
                Map<String, Object> struct = ODKFileUtils.mapper.readValue(json, reference);
                for (ColumnDefinition child : cp.getChildren()) {
                    String subkey = child.getElementKey();
                    ColumnDefinition subcp = orderedColumns.find(subkey);
                    if (subcp.isUnitOfRetention()) {
                        ElementType subtype = subcp.getType();
                        ElementDataType type = subtype.getDataType();
                        if (type == ElementDataType.integer) {
                            values.put(subkey, (Integer) struct.get(subcp.getElementName()));
                        } else if (type == ElementDataType.number) {
                            values.put(subkey, (Double) struct.get(subcp.getElementName()));
                        } else if (type == ElementDataType.bool) {
                            values.put(subkey, ((Boolean) struct.get(subcp.getElementName())) ? 1 : 0);
                        } else {
                            values.put(subkey, (String) struct.get(subcp.getElementName()));
                        }
                    } else {
                        // this must be a javascript structure... re-JSON it and save (for
                        // next round).
                        moreToResolve.put(subkey,
                                ODKFileUtils.mapper.writeValueAsString(struct.get(subcp.getElementName())));
                    }
                }
            } catch (JsonParseException e) {
                e.printStackTrace();
                throw new IllegalStateException("should not be happening");
            } catch (JsonMappingException e) {
                e.printStackTrace();
                throw new IllegalStateException("should not be happening");
            } catch (IOException e) {
                e.printStackTrace();
                throw new IllegalStateException("should not be happening");
            }
        }

        toBeResolved = moreToResolve;
    }
}
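
The while (!toBeResolved.isEmpty()) loop above is a worklist: each pass may uncover nested structures that need another round, and isEmpty() is the termination test once a pass queues no further work. A stripped-down sketch of the control flow (the map contents are invented, and the JSON flattening is reduced to a print):

import java.util.Map;
import java.util.TreeMap;

public class WorklistSketch {
    public static void main(String[] args) {
        TreeMap<String, String> toBeResolved = new TreeMap<String, String>();
        toBeResolved.put("geopoint", "{\"lat\":1,\"lon\":2}");
        // Iterate until a pass queues no further nested work.
        while (!toBeResolved.isEmpty()) {
            TreeMap<String, String> moreToResolve = new TreeMap<String, String>();
            for (Map.Entry<String, String> entry : toBeResolved.entrySet()) {
                // The real method flattens the JSON value here and queues
                // any still-nested children into moreToResolve.
                System.out.println("resolving " + entry.getKey());
            }
            toBeResolved = moreToResolve; // an empty map ends the loop
        }
    }
}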

From source file:org.opendatakit.services.database.utilities.ODKDatabaseImplUtils.java

/**
 * If the caller specified a complex json value for a structured type, flush
 * the value through to the individual columns.
 *
 * @param orderedColumns
 * @param values
 */
private void cleanUpValuesMap(OrderedColumns orderedColumns, Map<String, Object> values) {

    TreeMap<String, String> toBeResolved = new TreeMap<String, String>();

    for (String key : values.keySet()) {
        if (DataTableColumns.CONFLICT_TYPE.equals(key)) {
            continue;
        } else if (DataTableColumns.DEFAULT_ACCESS.equals(key)) {
            continue;
        } else if (DataTableColumns.ROW_OWNER.equals(key)) {
            continue;
        } else if (DataTableColumns.GROUP_READ_ONLY.equals(key)) {
            continue;
        } else if (DataTableColumns.GROUP_MODIFY.equals(key)) {
            continue;
        } else if (DataTableColumns.GROUP_PRIVILEGED.equals(key)) {
            continue;
        } else if (DataTableColumns.FORM_ID.equals(key)) {
            continue;
        } else if (DataTableColumns.ID.equals(key)) {
            continue;
        } else if (DataTableColumns.LOCALE.equals(key)) {
            continue;
        } else if (DataTableColumns.ROW_ETAG.equals(key)) {
            continue;
        } else if (DataTableColumns.SAVEPOINT_CREATOR.equals(key)) {
            continue;
        } else if (DataTableColumns.SAVEPOINT_TIMESTAMP.equals(key)) {
            continue;
        } else if (DataTableColumns.SAVEPOINT_TYPE.equals(key)) {
            continue;
        } else if (DataTableColumns.SYNC_STATE.equals(key)) {
            continue;
        } else if (DataTableColumns._ID.equals(key)) {
            continue;
        }
        // OK it is one of the data columns
        ColumnDefinition cp = orderedColumns.find(key);
        if (!cp.isUnitOfRetention()) {
            toBeResolved.put(key, (String) values.get(key));
        }
    }

    // remove these non-retained values from the values set...
    for (String key : toBeResolved.keySet()) {
        values.remove(key);
    }

    while (!toBeResolved.isEmpty()) {

        TreeMap<String, String> moreToResolve = new TreeMap<String, String>();

        for (Map.Entry<String, String> entry : toBeResolved.entrySet()) {
            String key = entry.getKey();
            String json = entry.getValue();
            if (json == null) {
                // don't need to do anything
                // since the value is null
                continue;
            }
            ColumnDefinition cp = orderedColumns.find(key);
            try {
                TypeReference<Map<String, Object>> reference = new TypeReference<Map<String, Object>>() {
                };
                Map<String, Object> struct = ODKFileUtils.mapper.readValue(json, reference);
                for (ColumnDefinition child : cp.getChildren()) {
                    String subkey = child.getElementKey();
                    ColumnDefinition subcp = orderedColumns.find(subkey);
                    if (subcp.isUnitOfRetention()) {
                        ElementType subtype = subcp.getType();
                        ElementDataType type = subtype.getDataType();
                        if (type == ElementDataType.integer) {
                            values.put(subkey, (Integer) struct.get(subcp.getElementName()));
                        } else if (type == ElementDataType.number) {
                            values.put(subkey, (Double) struct.get(subcp.getElementName()));
                        } else if (type == ElementDataType.bool) {
                            values.put(subkey, ((Boolean) struct.get(subcp.getElementName())) ? 1 : 0);
                        } else {
                            values.put(subkey, (String) struct.get(subcp.getElementName()));
                        }
                    } else {
                        // this must be a javascript structure... re-JSON it and save (for
                        // next round).
                        moreToResolve.put(subkey,
                                ODKFileUtils.mapper.writeValueAsString(struct.get(subcp.getElementName())));
                    }
                }
            } catch (JsonParseException e) {
                e.printStackTrace();
                throw new IllegalStateException("should not be happening");
            } catch (JsonMappingException e) {
                e.printStackTrace();
                throw new IllegalStateException("should not be happening");
            } catch (IOException e) {
                e.printStackTrace();
                throw new IllegalStateException("should not be happening");
            }
        }

        toBeResolved = moreToResolve;
    }
}

From source file:org.apache.hadoop.hive.metastore.MetaStoreDirectSql.java

/** Should be called with the list short enough to not trip up Oracle/etc. */
private List<Partition> getPartitionsFromPartitionIds(String dbName, String tblName, Boolean isView,
        List<Object> partIdList) throws MetaException {
    boolean doTrace = LOG.isDebugEnabled();
    int idStringWidth = (int) Math.ceil(Math.log10(partIdList.size())) + 1; // 1 for comma
    int sbCapacity = partIdList.size() * idStringWidth;
    // Prepare StringBuilder for "PART_ID in (...)" to use in future queries.
    StringBuilder partSb = new StringBuilder(sbCapacity);
    for (Object partitionId : partIdList) {
        partSb.append(extractSqlLong(partitionId)).append(",");
    }
    String partIds = trimCommaList(partSb);

    // Get most of the fields for the IDs provided.
    // Assume db and table names are the same for all partitions, as provided in arguments.
    String queryText = "select " + PARTITIONS + ".\"PART_ID\", " + SDS + ".\"SD_ID\", " + SDS + ".\"CD_ID\","
            + " " + SERDES + ".\"SERDE_ID\", " + PARTITIONS + ".\"CREATE_TIME\"," + " " + PARTITIONS
            + ".\"LAST_ACCESS_TIME\", " + SDS + ".\"INPUT_FORMAT\", " + SDS + ".\"IS_COMPRESSED\"," + " " + SDS
            + ".\"IS_STOREDASSUBDIRECTORIES\", " + SDS + ".\"LOCATION\", " + SDS + ".\"NUM_BUCKETS\"," + " "
            + SDS + ".\"OUTPUT_FORMAT\", " + SERDES + ".\"NAME\", " + SERDES + ".\"SLIB\" " + "from "
            + PARTITIONS + "" + "  left outer join " + SDS + " on " + PARTITIONS + ".\"SD_ID\" = " + SDS
            + ".\"SD_ID\" " + "  left outer join " + SERDES + " on " + SDS + ".\"SERDE_ID\" = " + SERDES
            + ".\"SERDE_ID\" " + "where \"PART_ID\" in (" + partIds + ") order by \"PART_NAME\" asc";
    long start = doTrace ? System.nanoTime() : 0;
    Query query = pm.newQuery("javax.jdo.query.SQL", queryText);
    List<Object[]> sqlResult = executeWithArray(query, null, queryText);
    long queryTime = doTrace ? System.nanoTime() : 0;
    Deadline.checkTimeout();

    // Read all the fields and create partitions, SDs and serdes.
    TreeMap<Long, Partition> partitions = new TreeMap<Long, Partition>();
    TreeMap<Long, StorageDescriptor> sds = new TreeMap<Long, StorageDescriptor>();
    TreeMap<Long, SerDeInfo> serdes = new TreeMap<Long, SerDeInfo>();
    TreeMap<Long, List<FieldSchema>> colss = new TreeMap<Long, List<FieldSchema>>();
    // Keep order by name, consistent with JDO.
    ArrayList<Partition> orderedResult = new ArrayList<Partition>(partIdList.size());

    // Prepare StringBuilder-s for "in (...)" lists to use in one-to-many queries.
    StringBuilder sdSb = new StringBuilder(sbCapacity), serdeSb = new StringBuilder(sbCapacity);
    StringBuilder colsSb = new StringBuilder(7); // We expect that there's only one field schema.
    tblName = tblName.toLowerCase();
    dbName = dbName.toLowerCase();
    for (Object[] fields : sqlResult) {
        // Here comes the ugly part...
        long partitionId = extractSqlLong(fields[0]);
        Long sdId = extractSqlLong(fields[1]);
        Long colId = extractSqlLong(fields[2]);
        Long serdeId = extractSqlLong(fields[3]);
        // A partition must have at least sdId and serdeId set, or nothing set if it's a view.
        if (sdId == null || serdeId == null) {
            if (isView == null) {
                isView = isViewTable(dbName, tblName);
            }
            if ((sdId != null || colId != null || serdeId != null) || !isView) {
                throw new MetaException("Unexpected null for one of the IDs, SD " + sdId + ", serde " + serdeId
                        + " for a " + (isView ? "" : "non-") + "view");
            }
        }

        Partition part = new Partition();
        orderedResult.add(part);
        // Set the collection fields; some code might not check presence before accessing them.
        part.setParameters(new HashMap<String, String>());
        part.setValues(new ArrayList<String>());
        part.setDbName(dbName);
        part.setTableName(tblName);
        if (fields[4] != null)
            part.setCreateTime(extractSqlInt(fields[4]));
        if (fields[5] != null)
            part.setLastAccessTime(extractSqlInt(fields[5]));
        partitions.put(partitionId, part);

        if (sdId == null)
            continue; // Probably a view.
        assert serdeId != null;

        // We assume each partition has a unique SD.
        StorageDescriptor sd = new StorageDescriptor();
        StorageDescriptor oldSd = sds.put(sdId, sd);
        if (oldSd != null) {
            throw new MetaException("Partitions reuse SDs; we don't expect that");
        }
        // Set the collection fields; some code might not check presence before accessing them.
        sd.setSortCols(new ArrayList<Order>());
        sd.setBucketCols(new ArrayList<String>());
        sd.setParameters(new HashMap<String, String>());
        sd.setSkewedInfo(new SkewedInfo(new ArrayList<String>(), new ArrayList<List<String>>(),
                new HashMap<List<String>, String>()));
        sd.setInputFormat((String) fields[6]);
        Boolean tmpBoolean = extractSqlBoolean(fields[7]);
        if (tmpBoolean != null)
            sd.setCompressed(tmpBoolean);
        tmpBoolean = extractSqlBoolean(fields[8]);
        if (tmpBoolean != null)
            sd.setStoredAsSubDirectories(tmpBoolean);
        sd.setLocation((String) fields[9]);
        if (fields[10] != null)
            sd.setNumBuckets(extractSqlInt(fields[10]));
        sd.setOutputFormat((String) fields[11]);
        sdSb.append(sdId).append(",");
        part.setSd(sd);

        if (colId != null) {
            List<FieldSchema> cols = colss.get(colId);
            // We expect that colId will be the same for all (or many) SDs.
            if (cols == null) {
                cols = new ArrayList<FieldSchema>();
                colss.put(colId, cols);
                colsSb.append(colId).append(",");
            }
            sd.setCols(cols);
        }

        // We assume each SD has a unique serde.
        SerDeInfo serde = new SerDeInfo();
        SerDeInfo oldSerde = serdes.put(serdeId, serde);
        if (oldSerde != null) {
            throw new MetaException("SDs reuse serdes; we don't expect that");
        }
        serde.setParameters(new HashMap<String, String>());
        serde.setName((String) fields[12]);
        serde.setSerializationLib((String) fields[13]);
        serdeSb.append(serdeId).append(",");
        sd.setSerdeInfo(serde);
        Deadline.checkTimeout();
    }
    query.closeAll();
    timingTrace(doTrace, queryText, start, queryTime);

    // Now get all the one-to-many things. Start with partitions.
    queryText = "select \"PART_ID\", \"PARAM_KEY\", \"PARAM_VALUE\" from " + PARTITION_PARAMS + ""
            + " where \"PART_ID\" in (" + partIds + ") and \"PARAM_KEY\" is not null"
            + " order by \"PART_ID\" asc";
    loopJoinOrderedResult(partitions, queryText, 0, new ApplyFunc<Partition>() {
        @Override
        public void apply(Partition t, Object[] fields) {
            t.putToParameters((String) fields[1], (String) fields[2]);
        }
    });
    // Perform conversion of null map values
    for (Partition t : partitions.values()) {
        t.setParameters(MetaStoreUtils.trimMapNulls(t.getParameters(), convertMapNullsToEmptyStrings));
    }

    queryText = "select \"PART_ID\", \"PART_KEY_VAL\" from " + PARTITION_KEY_VALS + ""
            + " where \"PART_ID\" in (" + partIds + ") and \"INTEGER_IDX\" >= 0"
            + " order by \"PART_ID\" asc, \"INTEGER_IDX\" asc";
    loopJoinOrderedResult(partitions, queryText, 0, new ApplyFunc<Partition>() {
        @Override
        public void apply(Partition t, Object[] fields) {
            t.addToValues((String) fields[1]);
        }
    });

    // Prepare IN (blah) lists for the following queries. Cut off the final ','s.
    if (sdSb.length() == 0) {
        assert serdeSb.length() == 0 && colsSb.length() == 0;
        return orderedResult; // No SDs, probably a view.
    }

    String sdIds = trimCommaList(sdSb);
    String serdeIds = trimCommaList(serdeSb);
    String colIds = trimCommaList(colsSb);

    // Get all the stuff for SD. Don't do empty-list check - we expect partitions do have SDs.
    queryText = "select \"SD_ID\", \"PARAM_KEY\", \"PARAM_VALUE\" from " + SD_PARAMS + ""
            + " where \"SD_ID\" in (" + sdIds + ") and \"PARAM_KEY\" is not null" + " order by \"SD_ID\" asc";
    loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
        @Override
        public void apply(StorageDescriptor t, Object[] fields) {
            t.putToParameters((String) fields[1], extractSqlClob(fields[2]));
        }
    });
    // Perform conversion of null map values
    for (StorageDescriptor t : sds.values()) {
        t.setParameters(MetaStoreUtils.trimMapNulls(t.getParameters(), convertMapNullsToEmptyStrings));
    }

    queryText = "select \"SD_ID\", \"COLUMN_NAME\", " + SORT_COLS + ".\"ORDER\"" + " from " + SORT_COLS + ""
            + " where \"SD_ID\" in (" + sdIds + ") and \"INTEGER_IDX\" >= 0"
            + " order by \"SD_ID\" asc, \"INTEGER_IDX\" asc";
    loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
        @Override
        public void apply(StorageDescriptor t, Object[] fields) {
            if (fields[2] == null)
                return;
            t.addToSortCols(new Order((String) fields[1], extractSqlInt(fields[2])));
        }
    });

    queryText = "select \"SD_ID\", \"BUCKET_COL_NAME\" from " + BUCKETING_COLS + "" + " where \"SD_ID\" in ("
            + sdIds + ") and \"INTEGER_IDX\" >= 0" + " order by \"SD_ID\" asc, \"INTEGER_IDX\" asc";
    loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
        @Override
        public void apply(StorageDescriptor t, Object[] fields) {
            t.addToBucketCols((String) fields[1]);
        }
    });

    // Skewed columns stuff.
    queryText = "select \"SD_ID\", \"SKEWED_COL_NAME\" from " + SKEWED_COL_NAMES + "" + " where \"SD_ID\" in ("
            + sdIds + ") and \"INTEGER_IDX\" >= 0" + " order by \"SD_ID\" asc, \"INTEGER_IDX\" asc";
    boolean hasSkewedColumns = loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
        @Override
        public void apply(StorageDescriptor t, Object[] fields) {
            if (!t.isSetSkewedInfo())
                t.setSkewedInfo(new SkewedInfo());
            t.getSkewedInfo().addToSkewedColNames((String) fields[1]);
        }
    }) > 0;

    // Assume we don't need to fetch the rest of the skewed column data if we have no columns.
    if (hasSkewedColumns) {
        // We are skipping the SKEWED_STRING_LIST table here, as it seems to be totally useless.
        queryText = "select " + SKEWED_VALUES + ".\"SD_ID_OID\"," + "  " + SKEWED_STRING_LIST_VALUES
                + ".\"STRING_LIST_ID\"," + "  " + SKEWED_STRING_LIST_VALUES + ".\"STRING_LIST_VALUE\" "
                + "from " + SKEWED_VALUES + " " + "  left outer join " + SKEWED_STRING_LIST_VALUES + " on "
                + SKEWED_VALUES + "." + "\"STRING_LIST_ID_EID\" = " + SKEWED_STRING_LIST_VALUES
                + ".\"STRING_LIST_ID\" " + "where " + SKEWED_VALUES + ".\"SD_ID_OID\" in (" + sdIds + ") "
                + "  and " + SKEWED_VALUES + ".\"STRING_LIST_ID_EID\" is not null " + "  and " + SKEWED_VALUES
                + ".\"INTEGER_IDX\" >= 0 " + "order by " + SKEWED_VALUES + ".\"SD_ID_OID\" asc, "
                + SKEWED_VALUES + ".\"INTEGER_IDX\" asc," + "  " + SKEWED_STRING_LIST_VALUES
                + ".\"INTEGER_IDX\" asc";
        loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
            private Long currentListId;
            private List<String> currentList;

            @Override
            public void apply(StorageDescriptor t, Object[] fields) throws MetaException {
                if (!t.isSetSkewedInfo())
                    t.setSkewedInfo(new SkewedInfo());
                // Note that this is not a typical list accumulator - there's no call to finalize
                // the last list. Instead we add list to SD first, as well as locally to add elements.
                if (fields[1] == null) {
                    currentList = null; // left outer join produced a list with no values
                    currentListId = null;
                    t.getSkewedInfo().addToSkewedColValues(Collections.<String>emptyList());
                } else {
                    long fieldsListId = extractSqlLong(fields[1]);
                    if (currentListId == null || fieldsListId != currentListId) {
                        currentList = new ArrayList<String>();
                        currentListId = fieldsListId;
                        t.getSkewedInfo().addToSkewedColValues(currentList);
                    }
                    currentList.add((String) fields[2]);
                }
            }
        });

        // We are skipping the SKEWED_STRING_LIST table here, as it seems to be totally useless.
        queryText = "select " + SKEWED_COL_VALUE_LOC_MAP + ".\"SD_ID\"," + " " + SKEWED_STRING_LIST_VALUES
                + ".\"STRING_LIST_ID\"," + " " + SKEWED_COL_VALUE_LOC_MAP + ".\"LOCATION\"," + " "
                + SKEWED_STRING_LIST_VALUES + ".\"STRING_LIST_VALUE\" " + "from " + SKEWED_COL_VALUE_LOC_MAP
                + "" + "  left outer join " + SKEWED_STRING_LIST_VALUES + " on " + SKEWED_COL_VALUE_LOC_MAP
                + "." + "\"STRING_LIST_ID_KID\" = " + SKEWED_STRING_LIST_VALUES + ".\"STRING_LIST_ID\" "
                + "where " + SKEWED_COL_VALUE_LOC_MAP + ".\"SD_ID\" in (" + sdIds + ")" + "  and "
                + SKEWED_COL_VALUE_LOC_MAP + ".\"STRING_LIST_ID_KID\" is not null " + "order by "
                + SKEWED_COL_VALUE_LOC_MAP + ".\"SD_ID\" asc," + "  " + SKEWED_STRING_LIST_VALUES
                + ".\"STRING_LIST_ID\" asc," + "  " + SKEWED_STRING_LIST_VALUES + ".\"INTEGER_IDX\" asc";

        loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
            private Long currentListId;
            private List<String> currentList;

            @Override
            public void apply(StorageDescriptor t, Object[] fields) throws MetaException {
                if (!t.isSetSkewedInfo()) {
                    SkewedInfo skewedInfo = new SkewedInfo();
                    skewedInfo.setSkewedColValueLocationMaps(new HashMap<List<String>, String>());
                    t.setSkewedInfo(skewedInfo);
                }
                Map<List<String>, String> skewMap = t.getSkewedInfo().getSkewedColValueLocationMaps();
                // Note that this is not a typical list accumulator: there is no step that
                // finalizes the last list. The list is built in place and (re)registered in
                // the map on every row.
                if (fields[1] == null) {
                    currentList = new ArrayList<String>(); // left outer join produced a list with no values
                    currentListId = null;
                } else {
                    long fieldsListId = extractSqlLong(fields[1]);
                    if (currentListId == null || fieldsListId != currentListId) {
                        currentList = new ArrayList<String>();
                        currentListId = fieldsListId;
                    } else {
                        skewMap.remove(currentList); // List keys compare by value, so drop the stale entry before re-adding it below
                    }
                    currentList.add((String) fields[3]);
                }
                skewMap.put(currentList, (String) fields[2]);
            }
        });
    } // if (hasSkewedColumns)

    // Get FieldSchema stuff if any.
    if (!colss.isEmpty()) {
        // We are skipping the CDS table here, as it seems to be totally useless.
        queryText = "select \"CD_ID\", \"COMMENT\", \"COLUMN_NAME\", \"TYPE_NAME\"" + " from " + COLUMNS_V2
                + " where \"CD_ID\" in (" + colIds + ") and \"INTEGER_IDX\" >= 0"
                + " order by \"CD_ID\" asc, \"INTEGER_IDX\" asc";
        loopJoinOrderedResult(colss, queryText, 0, new ApplyFunc<List<FieldSchema>>() {
            @Override
            public void apply(List<FieldSchema> t, Object[] fields) {
                t.add(new FieldSchema((String) fields[2], extractSqlClob(fields[3]), (String) fields[1]));
            }
        });
    }

    // Finally, get all the stuff for serdes - just the params.
    queryText = "select \"SERDE_ID\", \"PARAM_KEY\", \"PARAM_VALUE\" from " + SERDE_PARAMS + ""
            + " where \"SERDE_ID\" in (" + serdeIds + ") and \"PARAM_KEY\" is not null"
            + " order by \"SERDE_ID\" asc";
    loopJoinOrderedResult(serdes, queryText, 0, new ApplyFunc<SerDeInfo>() {
        @Override
        public void apply(SerDeInfo t, Object[] fields) {
            t.putToParameters((String) fields[1], extractSqlClob(fields[2]));
        }
    });
    // Perform conversion of null map values
    for (SerDeInfo t : serdes.values()) {
        t.setParameters(MetaStoreUtils.trimMapNulls(t.getParameters(), convertMapNullsToEmptyStrings));
    }

    return orderedResult;
}
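
A note on the idiom shared by the two skewed-value callbacks above: because each query is ordered by owner id, list id, and element index, the callback only has to watch for the list id changing to know when to open a new list; there is no end-of-list event. Below is a minimal standalone sketch of that grouping pattern, using hypothetical in-memory rows in place of the JDBC result set:

import java.util.ArrayList;
import java.util.List;
import java.util.TreeMap;

public class OrderedGroupingSketch {
    public static void main(String[] args) {
        // Rows pre-sorted by list id, then element index, as the SQL "order by" guarantees.
        Object[][] rows = { { 1L, "a" }, { 1L, "b" }, { 2L, "x" } };

        TreeMap<Long, List<String>> listsById = new TreeMap<Long, List<String>>();
        Long currentListId = null;
        List<String> currentList = null;

        for (Object[] fields : rows) {
            long fieldsListId = (Long) fields[0];
            if (currentListId == null || fieldsListId != currentListId) {
                // The list id changed: register a fresh list first, then append in place.
                currentList = new ArrayList<String>();
                currentListId = fieldsListId;
                listsById.put(currentListId, currentList);
            }
            currentList.add((String) fields[1]);
        }

        System.out.println(listsById.isEmpty() ? "no lists" : listsById); // {1=[a, b], 2=[x]}
    }
}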

From source file:org.ncic.bioinfo.sparkseq.algorithms.walker.mutect.Mutect.java

@Override
protected void map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext rawContext) {

    final char upRef = Character.toUpperCase((char) ref.getBase());
    if (upRef != 'A' && upRef != 'C' && upRef != 'G' && upRef != 'T') {
        return;
    }

    ReadBackedPileup tumorPileup = cleanNoneRefPileupElement(rawContext.getBasePileup());
    ReadBackedPileup normalPileup = cleanNoneRefPileupElement(normalSamTraverser.next().getBasePileup());
    // an optimization to speed things up when there is no coverage
    if (tumorPileup.depthOfCoverage() == 0 && normalPileup.depthOfCoverage() == 0) {
        return;
    }

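    // Candidates are keyed by initial tumor LOD, so the TreeMap's natural ordering makes
    // lastEntry() at the end of this method the highest-scoring candidate.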
    TreeMap<Double, CandidateMutation> messageByTumorLod = new TreeMap<Double, CandidateMutation>();
    // get sequence context around mutation
    String sequenceContext = SequenceUtils.createSequenceContext(this.refContentProvider, ref, 3);

    try {
        final LocusReadPile tumorReadPile = new LocusReadPile(tumorPileup, upRef, MTAC.MIN_QSCORE,
                MIN_QSUM_QSCORE, false, MTAC.ARTIFACT_DETECTION_MODE, MTAC.ENABLE_QSCORE_OUTPUT);
        final LocusReadPile normalReadPile = new LocusReadPile(normalPileup, upRef, MTAC.MIN_QSCORE, 0,
                this.USE_MAPQ0_IN_NORMAL_QSCORE, true, MTAC.ENABLE_QSCORE_OUTPUT);

        Collection<VariantContext> panelOfNormalsVC = tracker.getValues(normalPanelRod,
                rawContext.getLocation());
        Collection<VariantContext> cosmicVC = getVCInTrackerInLocus(RODNames.COSMIC, tracker);
        Collection<VariantContext> dbsnpVC = getVCInTrackerInLocus(RODNames.DBSNP, tracker);

        // remove the effect of cosmic from dbSNP
        boolean germlineAtRisk = (!dbsnpVC.isEmpty() && cosmicVC.isEmpty());

        // compute coverage flags
        int tumorCoveredDepthThreshold = 14;
        int normalCoveredDepthThreshold = (germlineAtRisk) ? 19 : 8;
        if (!hasNormalBam) {
            normalCoveredDepthThreshold = 0;
        }

        int tumorBaseCount = tumorReadPile.finalPileupReads.size();
        int normalBaseCount = normalReadPile.finalPileupReads.size();
        boolean isTumorCovered = tumorBaseCount >= tumorCoveredDepthThreshold;
        boolean isNormalCovered = normalBaseCount >= normalCoveredDepthThreshold;
        boolean isBaseCovered = isTumorCovered && isNormalCovered;
        if (!hasNormalBam) {
            isBaseCovered = isTumorCovered;
        }

        int tumorQ20BaseCount = tumorReadPile.getFilteredBaseCount(20);
        int normalQ20BaseCount = normalReadPile.getFilteredBaseCount(20);

        // calculate power
        double tumorPower = tumorPowerCalculator.cachingPowerCalculation(tumorBaseCount,
                MTAC.POWER_CONSTANT_AF);

        double normalPowerNoSNPPrior = normalNovelSitePowerCalculator.cachingPowerCalculation(normalBaseCount);
        double normalPowerWithSNPPrior = normalDbSNPSitePowerCalculator
                .cachingPowerCalculation(normalBaseCount);

        double normalPower = (germlineAtRisk) ? normalPowerWithSNPPrior : normalPowerNoSNPPrior;

        double combinedPower = tumorPower * normalPower;
        if (!hasNormalBam) {
            combinedPower = tumorPower;
        }

        int mapQ0Reads = tumorReadPile.qualityScoreFilteredPileup.getNumberOfMappingQualityZeroReads()
                + normalReadPile.qualityScoreFilteredPileup.getNumberOfMappingQualityZeroReads();

        int totalReads = tumorReadPile.qualityScoreFilteredPileup.depthOfCoverage()
                + normalReadPile.qualityScoreFilteredPileup.depthOfCoverage();

        // Test each of the possible alternate alleles
        for (final char altAllele : new char[] { 'A', 'C', 'G', 'T' }) {
            if (altAllele == upRef) {
                continue;
            }
            if (!MTAC.FORCE_OUTPUT && tumorReadPile.qualitySums.getCounts(altAllele) == 0) {
                continue;
            }

            CandidateMutation candidate = new CandidateMutation(rawContext.getLocation(), upRef);
            candidate.setSequenceContext(sequenceContext);
            candidate.setTumorSampleName(MTAC.TUMOR_SAMPLE_NAME);
            candidate.setNormalSampleName(MTAC.NORMAL_SAMPLE_NAME);
            candidate.setCovered(isBaseCovered);
            candidate.setPower(combinedPower);
            candidate.setTumorPower(tumorPower);
            candidate.setNormalPower(normalPower);
            candidate.setNormalPowerWithSNPPrior(normalPowerWithSNPPrior);
            candidate.setNormalPowerNoSNPPrior(normalPowerNoSNPPrior);
            candidate.setTumorQ20Count(tumorQ20BaseCount);
            candidate.setNormalQ20Count(normalQ20BaseCount);
            candidate.setInitialTumorNonRefQualitySum(tumorReadPile.qualitySums.getOtherQualities(upRef));
            candidate.setAltAllele(altAllele);
            candidate.setMapQ0Reads(mapQ0Reads);
            candidate.setTotalReads(totalReads);
            candidate.setContaminationFraction(MTAC.FRACTION_CONTAMINATION);
            candidate.setPanelOfNormalsVC(
                    panelOfNormalsVC.isEmpty() ? null : panelOfNormalsVC.iterator().next()); // if there are multiple, we're just grabbing the first
            candidate.setCosmicSite(!cosmicVC.isEmpty());
            candidate.setDbsnpSite(!dbsnpVC.isEmpty());
            candidate.setDbsnpVC(dbsnpVC.isEmpty() ? null : dbsnpVC.iterator().next());
            candidate.setTumorF(tumorReadPile.estimateAlleleFraction(upRef, altAllele));

            if (!MTAC.FORCE_OUTPUT && candidate.getTumorF() < MTAC.TUMOR_F_PRETEST) {
                continue;
            }

            candidate.setInitialTumorAltCounts(tumorReadPile.qualitySums.getCounts(altAllele));
            candidate.setInitialTumorRefCounts(tumorReadPile.qualitySums.getCounts(upRef));
            candidate.setInitialTumorAltQualitySum(tumorReadPile.qualitySums.getQualitySum(altAllele));
            candidate.setInitialTumorRefQualitySum(tumorReadPile.qualitySums.getQualitySum(upRef));

            double tumorLod = tumorReadPile.calculateAltVsRefLOD((byte) altAllele, candidate.getTumorF(), 0);
            candidate.setTumorLodFStar(tumorLod);

            candidate.setInitialTumorReadDepth(tumorReadPile.finalPileupReads.size());
            candidate.setTumorInsertionCount(tumorReadPile.getInsertionsCount());
            candidate.setTumorDeletionCount(tumorReadPile.getDeletionsCount());

            if (candidate.getTumorLodFStar() < MTAC.INITIAL_TUMOR_LOD_THRESHOLD) {
                continue;
            }

            // calculate lod of contaminant
            double contaminantF = Math.min(contaminantAlternateFraction, candidate.getTumorF());
            VariableAllelicRatioGenotypeLikelihoods contaminantLikelihoods = new VariableAllelicRatioGenotypeLikelihoods(
                    upRef, contaminantF);

            List<PileupElement> peList = new ArrayList<PileupElement>(
                    tumorReadPile.finalPileup.depthOfCoverage());
            for (PileupElement pe : tumorReadPile.finalPileup) {
                peList.add(pe);
            }

            Collections.sort(peList, new PileupComparatorByAltRefQual((byte) altAllele));
            int readsToKeep = (int) (peList.size() * contaminantAlternateFraction);

            for (PileupElement pe : peList) {
                byte base = pe.getBase();
                if (pe.getBase() == altAllele) {
                    // if we've retained all we need, then turn the remainder of alts to ref
                    if (readsToKeep == 0) {
                        base = (byte) upRef;
                    } else {
                        readsToKeep--;
                    }
                }

                contaminantLikelihoods.add(base, pe.getQual());
            }
            double[] refHetHom = LocusReadPile.extractRefHetHom(contaminantLikelihoods, upRef, altAllele);
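            // LOD of the contaminant hypothesis: het genotype likelihood minus ref genotype likelihood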
            double contaminantLod = refHetHom[1] - refHetHom[0];
            candidate.setContaminantLod(contaminantLod);

            final QualitySums normQs = normalReadPile.qualitySums;

            VariableAllelicRatioGenotypeLikelihoods normalGl = normalReadPile
                    .calculateLikelihoods(normalReadPile.qualityScoreFilteredPileup); // use MAPQ0 reads
            candidate.setInitialNormalBestGenotype(normalReadPile.getBestGenotype(normalGl));
            candidate.setInitialNormalLod(LocusReadPile.getRefVsAlt(normalGl, upRef, altAllele));

            double normalF = Math.max(LocusReadPile
                    .estimateAlleleFraction(normalReadPile.qualityScoreFilteredPileup, upRef, altAllele),
                    MTAC.MINIMUM_NORMAL_ALLELE_FRACTION);
            candidate.setNormalF(normalF);

            candidate.setInitialNormalAltQualitySum(normQs.getQualitySum(altAllele));
            candidate.setInitialNormalRefQualitySum(normQs.getQualitySum(upRef));

            candidate.setNormalAltQualityScores(normQs.getBaseQualityScores(altAllele));
            candidate.setNormalRefQualityScores(normQs.getBaseQualityScores(upRef));

            candidate.setInitialNormalAltCounts(normQs.getCounts(altAllele));
            candidate.setInitialNormalRefCounts(normQs.getCounts(upRef));
            candidate.setInitialNormalReadDepth(normalReadPile.finalPileupReads.size());

            // TODO: parameterize filtering Mate-Rescued Reads (if someone wants to disable this)
            final LocusReadPile t2 = filterReads(ref, tumorReadPile.finalPileup, true);

            // if there are no reads remaining, abandon this theory
            if (!MTAC.FORCE_OUTPUT && t2.finalPileupReads.size() == 0) {
                continue;
            }

            candidate.setInitialTumorAltCounts(t2.qualitySums.getCounts(altAllele));
            candidate.setInitialTumorRefCounts(t2.qualitySums.getCounts(upRef));
            candidate.setInitialTumorAltQualitySum(t2.qualitySums.getQualitySum(altAllele));
            candidate.setInitialTumorRefQualitySum(t2.qualitySums.getQualitySum(upRef));

            candidate.setTumorAltQualityScores(t2.qualitySums.getBaseQualityScores(altAllele));
            candidate.setTumorRefQualityScores(t2.qualitySums.getBaseQualityScores(upRef));

            VariableAllelicRatioGenotypeLikelihoods t2Gl = t2.calculateLikelihoods(t2.finalPileup);
            candidate.setInitialTumorLod(t2.getAltVsRef(t2Gl, upRef, altAllele));
            candidate.setInitialTumorReadDepth(t2.finalPileupReads.size());

            candidate.setTumorF(t2.estimateAlleleFraction(upRef, altAllele));
            double tumorLod2 = t2.calculateAltVsRefLOD((byte) altAllele, candidate.getTumorF(), 0);
            candidate.setTumorLodFStar(tumorLod2);

            //TODO: clean up use of forward/reverse vs positive/negative (prefer the latter since GATK uses it)
            ReadBackedPileup forwardPileup = filterReads(ref, tumorReadPile.finalPileupPositiveStrand,
                    true).finalPileupPositiveStrand;
            double f2forward = LocusReadPile.estimateAlleleFraction(forwardPileup, upRef, altAllele);
            candidate.setTumorLodFStarForward(
                    t2.calculateAltVsRefLOD(forwardPileup, (byte) altAllele, f2forward, 0.0));

            ReadBackedPileup reversePileup = filterReads(ref, tumorReadPile.finalPileupNegativeStrand,
                    true).finalPileupNegativeStrand;
            double f2reverse = LocusReadPile.estimateAlleleFraction(reversePileup, upRef, altAllele);
            candidate.setTumorLodFStarReverse(
                    t2.calculateAltVsRefLOD(reversePileup, (byte) altAllele, f2reverse, 0.0));

            // calculate strand bias power
            candidate.setPowerToDetectPositiveStrandArtifact(strandArtifactPowerCalculator
                    .cachingPowerCalculation(reversePileup.depthOfCoverage(), candidate.getTumorF()));
            candidate.setPowerToDetectNegativeStrandArtifact(strandArtifactPowerCalculator
                    .cachingPowerCalculation(forwardPileup.depthOfCoverage(), candidate.getTumorF()));

            candidate.setStrandContingencyTable(SequenceUtils.getStrandContingencyTable(forwardPileup,
                    reversePileup, (byte) upRef, (byte) altAllele));

            ArrayList<PileupElement> mutantPileupElements = new ArrayList<PileupElement>();
            ArrayList<PileupElement> referencePileupElements = new ArrayList<PileupElement>();

            for (PileupElement p : t2.finalPileup) {
                final SAMRecord read = p.getRead();
                final int offset = p.getOffset();

                if (read.getReadString().charAt(offset) == altAllele) {
                    mutantPileupElements.add(p);
                } else if (read.getReadString().charAt(offset) == upRef) {
                    referencePileupElements.add(p);
                } else {
                    // just drop the read...
                }
            }

            ReadBackedPileup mutantPileup = new ReadBackedPileupImpl(rawContext.getLocation(),
                    mutantPileupElements);

            ReadBackedPileup referencePileup = new ReadBackedPileupImpl(rawContext.getLocation(),
                    referencePileupElements);

            // TODO: shouldn't this be refAllele here?
            final LocusReadPile mutantPile = new LocusReadPile(mutantPileup, altAllele, 0, 0,
                    MTAC.ENABLE_QSCORE_OUTPUT);
            final LocusReadPile refPile = new LocusReadPile(referencePileup, altAllele, 0, 0,
                    MTAC.ENABLE_QSCORE_OUTPUT);

            // Set the maximum observed mapping quality score for the reference and alternate alleles
            int[] rmq = referencePileup.getMappingQuals();
            candidate.setTumorRefMaxMapQ((rmq.length == 0) ? 0 : NumberUtils.max(rmq));

            int[] amq = mutantPileup.getMappingQuals();
            candidate.setTumorAltMaxMapQ((amq.length == 0) ? 0 : NumberUtils.max(amq));

            // start with just the tumor pile
            candidate.setTumorAltForwardOffsetsInRead(SequenceUtils.getForwardOffsetsInRead(mutantPileup));
            candidate.setTumorAltReverseOffsetsInRead(SequenceUtils.getReverseOffsetsInRead(mutantPileup));

            if (candidate.getTumorAltForwardOffsetsInRead().size() > 0) {
                double[] offsets = MuTectStats
                        .convertIntegersToDoubles(candidate.getTumorAltForwardOffsetsInRead());
                double median = MuTectStats.getMedian(offsets);
                candidate.setTumorForwardOffsetsInReadMedian(median);
                candidate.setTumorForwardOffsetsInReadMad(MuTectStats.calculateMAD(offsets, median));
            }

            if (candidate.getTumorAltReverseOffsetsInRead().size() > 0) {
                double[] offsets = MuTectStats
                        .convertIntegersToDoubles(candidate.getTumorAltReverseOffsetsInRead());
                double median = MuTectStats.getMedian(offsets);
                candidate.setTumorReverseOffsetsInReadMedian(median);
                candidate.setTumorReverseOffsetsInReadMad(MuTectStats.calculateMAD(offsets, median));
            }

            // test to see if the candidate should be rejected
            performRejection(candidate);

            messageByTumorLod.put(candidate.getInitialTumorLod(), candidate);

        }

        // if more than one site passes the tumor LOD threshold for KEEP, fail them all with the triallelic_site filter
        int passingCandidates = 0;
        for (CandidateMutation c : messageByTumorLod.values()) {
            if (c.getTumorLodFStar() >= MTAC.TUMOR_LOD_THRESHOLD) {
                passingCandidates++;
            }
        }

        if (passingCandidates > 1) {
            for (CandidateMutation c : messageByTumorLod.values()) {
                c.addRejectionReason("triallelic_site");
            }
        }

        // write out the call stats for the "best" candidate
        if (!messageByTumorLod.isEmpty()) {
            CandidateMutation m = messageByTumorLod.lastEntry().getValue();

            // only output passing calls OR rejected sites if ONLY_PASSING_CALLS is not specified
            if (!m.isRejected() || (m.isRejected() && !MTAC.ONLY_PASSING_CALLS)) {

                //out.println(callStatsGenerator.generateCallStats(m));
                resultVCFOutInfos.add(callStatsGenerator.generateCallStats(m));
                resultVCFRecords.add(VCFGenerator.generateVC(m));
            }
        }
    } catch (MathException me) {
        throw new GATKException(me.getMessage());
    }
}
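
The TreeMap keyed by tumor LOD is what makes the closing isEmpty()/lastEntry() pair work: Double keys sort in natural order, so the last entry is the highest-scoring candidate. Here is a stripped-down sketch of that selection step, with plain strings standing in for CandidateMutation objects:

import java.util.TreeMap;

public class BestCandidateSketch {
    public static void main(String[] args) {
        // Keys are tumor LOD scores; values stand in for CandidateMutation objects.
        TreeMap<Double, String> messageByTumorLod = new TreeMap<Double, String>();
        messageByTumorLod.put(3.1, "candidate C>A");
        messageByTumorLod.put(7.4, "candidate C>T");

        // lastEntry() returns null on an empty map, so guard with isEmpty() first,
        // just as the walker does before emitting call stats.
        if (!messageByTumorLod.isEmpty()) {
            System.out.println("best: " + messageByTumorLod.lastEntry().getValue()); // best: candidate C>T
        }
    }
}

One caveat of this design: two candidates with exactly the same initial tumor LOD collide on the same key, and put() silently replaces the earlier one; keeping ties would require something like a TreeMap<Double, List<CandidateMutation>>.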

From source file:org.broadinstitute.cga.tools.gatk.walkers.cancer.mutect.MuTect.java

@Override
public Integer map(final RefMetaDataTracker tracker, final ReferenceContext ref,
        final AlignmentContext rawContext) {
    if (MTAC.NOOP)
        return 0;

    TreeMap<Double, CandidateMutation> messageByTumorLod = new TreeMap<Double, CandidateMutation>();

    ReadBackedPileup pileup = rawContext.getBasePileup();
    int numberOfReads = pileup.depthOfCoverage();
    binReadsProcessed += numberOfReads;

    if (binReadsProcessed >= 1000000) {
        long time = System.currentTimeMillis();
        long elapsedTime = time - lastTime;
        lastTime = time;

        totalReadsProcessed += binReadsProcessed;
        binReadsProcessed = 0;

        logger.info(String.format("[MUTECT] Processed %d reads in %d ms", totalReadsProcessed, elapsedTime));
    }

    // an optimization to speed things up when there is no coverage
    if (!MTAC.FORCE_OUTPUT && numberOfReads == 0) {
        return -1;
    }

    // get sequence context around mutation
    String sequenceContext = SequenceUtils.createSequenceContext(ref, 3);

    // only process bases where the reference is [ACGT], because the FASTA for HG18 has N, M and R!
    final char upRef = Character.toUpperCase(ref.getBaseAsChar());
    if (upRef != 'A' && upRef != 'C' && upRef != 'G' && upRef != 'T') {
        return -1;
    }

    try {

        Map<SampleType, ReadBackedPileup> pileupMap = getPileupsBySampleType(pileup);

        final LocusReadPile tumorReadPile = new LocusReadPile(pileupMap.get(SampleType.TUMOR), upRef,
                MTAC.MIN_QSCORE, MIN_QSUM_QSCORE, false, MTAC.ARTIFACT_DETECTION_MODE,
                MTAC.ENABLE_QSCORE_OUTPUT);
        final LocusReadPile normalReadPile = new LocusReadPile(pileupMap.get(SampleType.NORMAL), upRef,
                MTAC.MIN_QSCORE, 0, this.USE_MAPQ0_IN_NORMAL_QSCORE, true, MTAC.ENABLE_QSCORE_OUTPUT);

        Collection<VariantContext> panelOfNormalsVC = tracker.getValues(normalPanelRod,
                rawContext.getLocation());
        Collection<VariantContext> cosmicVC = tracker.getValues(cosmicRod, rawContext.getLocation());
        Collection<VariantContext> dbsnpVC = tracker.getValues(dbsnpRod, rawContext.getLocation());

        // remove the effect of cosmic from dbSNP
        boolean germlineAtRisk = (!dbsnpVC.isEmpty() && cosmicVC.isEmpty());

        // compute coverage flags
        int tumorCoveredDepthThreshold = 14;
        int normalCoveredDepthThreshold = (germlineAtRisk) ? 19 : 8;
        if (!hasNormalBam) {
            normalCoveredDepthThreshold = 0;
        }

        int tumorBaseCount = tumorReadPile.finalPileupReads.size();
        int normalBaseCount = normalReadPile.finalPileupReads.size();
        boolean isTumorCovered = tumorBaseCount >= tumorCoveredDepthThreshold;
        boolean isNormalCovered = normalBaseCount >= normalCoveredDepthThreshold;
        boolean isBaseCovered = isTumorCovered && isNormalCovered;
        if (!hasNormalBam) {
            isBaseCovered = isTumorCovered;
        }

        stdCovWriter.writeCoverage(rawContext, isBaseCovered);
        int tumorQ20BaseCount = tumorReadPile.getFilteredBaseCount(20);
        int normalQ20BaseCount = normalReadPile.getFilteredBaseCount(20);
        q20CovWriter.writeCoverage(rawContext, tumorQ20BaseCount >= 20 && normalQ20BaseCount >= 20);
        tumorDepthWriter.writeCoverage(rawContext, tumorBaseCount);
        normalDepthWriter.writeCoverage(rawContext, normalBaseCount);

        // calculate power
        double tumorPower = tumorPowerCalculator.cachingPowerCalculation(tumorBaseCount,
                MTAC.POWER_CONSTANT_AF);

        double normalPowerNoSNPPrior = normalNovelSitePowerCalculator.cachingPowerCalculation(normalBaseCount);
        double normalPowerWithSNPPrior = normalDbSNPSitePowerCalculator
                .cachingPowerCalculation(normalBaseCount);

        double normalPower = (germlineAtRisk) ? normalPowerWithSNPPrior : normalPowerNoSNPPrior;

        double combinedPower = tumorPower * normalPower;
        if (!hasNormalBam) {
            combinedPower = tumorPower;
        }

        powerWriter.writeCoverage(rawContext, combinedPower);

        int mapQ0Reads = tumorReadPile.qualityScoreFilteredPileup.getNumberOfMappingQualityZeroReads()
                + normalReadPile.qualityScoreFilteredPileup.getNumberOfMappingQualityZeroReads();

        int totalReads = tumorReadPile.qualityScoreFilteredPileup.depthOfCoverage()
                + normalReadPile.qualityScoreFilteredPileup.depthOfCoverage();

        // Test each of the possible alternate alleles
        for (final char altAllele : new char[] { 'A', 'C', 'G', 'T' }) {
            if (altAllele == upRef) {
                continue;
            }
            if (!MTAC.FORCE_OUTPUT && tumorReadPile.qualitySums.getCounts(altAllele) == 0) {
                continue;
            }

            CandidateMutation candidate = new CandidateMutation(rawContext.getLocation(), upRef);
            candidate.setSequenceContext(sequenceContext);
            candidate.setTumorSampleName(MTAC.TUMOR_SAMPLE_NAME);
            candidate.setNormalSampleName(MTAC.NORMAL_SAMPLE_NAME);
            candidate.setCovered(isBaseCovered);
            candidate.setPower(combinedPower);
            candidate.setTumorPower(tumorPower);
            candidate.setNormalPower(normalPower);
            candidate.setNormalPowerWithSNPPrior(normalPowerWithSNPPrior);
            candidate.setNormalPowerNoSNPPrior(normalPowerNoSNPPrior);
            candidate.setTumorQ20Count(tumorQ20BaseCount);
            candidate.setNormalQ20Count(normalQ20BaseCount);
            candidate.setInitialTumorNonRefQualitySum(tumorReadPile.qualitySums.getOtherQualities(upRef));
            candidate.setAltAllele(altAllele);
            candidate.setMapQ0Reads(mapQ0Reads);
            candidate.setTotalReads(totalReads);
            candidate.setContaminationFraction(MTAC.FRACTION_CONTAMINATION);
            candidate.setPanelOfNormalsVC(
                    panelOfNormalsVC.isEmpty() ? null : panelOfNormalsVC.iterator().next()); // if there are multiple, we're just grabbing the first
            candidate.setCosmicSite(!cosmicVC.isEmpty());
            candidate.setDbsnpSite(!dbsnpVC.isEmpty());
            candidate.setDbsnpVC(dbsnpVC.isEmpty() ? null : dbsnpVC.iterator().next());
            candidate.setTumorF(tumorReadPile.estimateAlleleFraction(upRef, altAllele));

            if (!MTAC.FORCE_OUTPUT && candidate.getTumorF() < MTAC.TUMOR_F_PRETEST) {
                continue;
            }

            if (++candidatesInspected % 1000 == 0) {
                logger.info(String.format("[MUTECT] Inspected %d potential candidates", candidatesInspected));
            }

            candidate.setInitialTumorAltCounts(tumorReadPile.qualitySums.getCounts(altAllele));
            candidate.setInitialTumorRefCounts(tumorReadPile.qualitySums.getCounts(upRef));
            candidate.setInitialTumorAltQualitySum(tumorReadPile.qualitySums.getQualitySum(altAllele));
            candidate.setInitialTumorRefQualitySum(tumorReadPile.qualitySums.getQualitySum(upRef));

            double tumorLod = tumorReadPile.calculateAltVsRefLOD((byte) altAllele, candidate.getTumorF(), 0);
            candidate.setTumorLodFStar(tumorLod);

            candidate.setInitialTumorReadDepth(tumorReadPile.finalPileupReads.size());
            candidate.setTumorInsertionCount(tumorReadPile.getInsertionsCount());
            candidate.setTumorDeletionCount(tumorReadPile.getDeletionsCount());

            if (candidate.getTumorLodFStar() < MTAC.INITIAL_TUMOR_LOD_THRESHOLD) {
                continue;
            }

            // calculate lod of contaminant
            double contaminantF = Math.min(contaminantAlternateFraction, candidate.getTumorF());
            VariableAllelicRatioGenotypeLikelihoods contaminantLikelihoods = new VariableAllelicRatioGenotypeLikelihoods(
                    upRef, contaminantF);

            List<PileupElement> peList = new ArrayList<PileupElement>(
                    tumorReadPile.finalPileup.depthOfCoverage());
            for (PileupElement pe : tumorReadPile.finalPileup) {
                peList.add(pe);
            }

            Collections.sort(peList, new PileupComparatorByAltRefQual((byte) altAllele));
            int readsToKeep = (int) (peList.size() * contaminantAlternateFraction);

            for (PileupElement pe : peList) {
                byte base = pe.getBase();
                if (pe.getBase() == altAllele) {
                    // if we've retained all we need, then turn the remainder of alts to ref
                    if (readsToKeep == 0) {
                        base = (byte) upRef;
                    } else {
                        readsToKeep--;
                    }
                }

                contaminantLikelihoods.add(base, pe.getQual());
            }
            double[] refHetHom = LocusReadPile.extractRefHetHom(contaminantLikelihoods, upRef, altAllele);
            double contaminantLod = refHetHom[1] - refHetHom[0];
            candidate.setContaminantLod(contaminantLod);

            final QualitySums normQs = normalReadPile.qualitySums;

            VariableAllelicRatioGenotypeLikelihoods normalGl = normalReadPile
                    .calculateLikelihoods(normalReadPile.qualityScoreFilteredPileup); // use MAPQ0 reads
            candidate.setInitialNormalBestGenotype(normalReadPile.getBestGenotype(normalGl));
            candidate.setInitialNormalLod(LocusReadPile.getRefVsAlt(normalGl, upRef, altAllele));

            double normalF = Math.max(LocusReadPile
                    .estimateAlleleFraction(normalReadPile.qualityScoreFilteredPileup, upRef, altAllele),
                    MTAC.MINIMUM_NORMAL_ALLELE_FRACTION);
            candidate.setNormalF(normalF);

            candidate.setInitialNormalAltQualitySum(normQs.getQualitySum(altAllele));
            candidate.setInitialNormalRefQualitySum(normQs.getQualitySum(upRef));

            candidate.setNormalAltQualityScores(normQs.getBaseQualityScores(altAllele));
            candidate.setNormalRefQualityScores(normQs.getBaseQualityScores(upRef));

            candidate.setInitialNormalAltCounts(normQs.getCounts(altAllele));
            candidate.setInitialNormalRefCounts(normQs.getCounts(upRef));
            candidate.setInitialNormalReadDepth(normalReadPile.finalPileupReads.size());

            // TODO: parameterize filtering Mate-Rescued Reads (if someone wants to disable this)
            final LocusReadPile t2 = filterReads(ref, tumorReadPile.finalPileup, true);

            // if there are no reads remaining, abandon this theory
            if (!MTAC.FORCE_OUTPUT && t2.finalPileupReads.size() == 0) {
                continue;
            }

            candidate.setInitialTumorAltCounts(t2.qualitySums.getCounts(altAllele));
            candidate.setInitialTumorRefCounts(t2.qualitySums.getCounts(upRef));
            candidate.setInitialTumorAltQualitySum(t2.qualitySums.getQualitySum(altAllele));
            candidate.setInitialTumorRefQualitySum(t2.qualitySums.getQualitySum(upRef));

            candidate.setTumorAltQualityScores(t2.qualitySums.getBaseQualityScores(altAllele));
            candidate.setTumorRefQualityScores(t2.qualitySums.getBaseQualityScores(upRef));

            VariableAllelicRatioGenotypeLikelihoods t2Gl = t2.calculateLikelihoods(t2.finalPileup);
            candidate.setInitialTumorLod(t2.getAltVsRef(t2Gl, upRef, altAllele));
            candidate.setInitialTumorReadDepth(t2.finalPileupReads.size());

            candidate.setTumorF(t2.estimateAlleleFraction(upRef, altAllele));
            double tumorLod2 = t2.calculateAltVsRefLOD((byte) altAllele, candidate.getTumorF(), 0);
            candidate.setTumorLodFStar(tumorLod2);

            //TODO: clean up use of forward/reverse vs positive/negative (prefer the latter since GATK uses it)
            ReadBackedPileup forwardPileup = filterReads(ref, tumorReadPile.finalPileupPositiveStrand,
                    true).finalPileupPositiveStrand;
            double f2forward = LocusReadPile.estimateAlleleFraction(forwardPileup, upRef, altAllele);
            candidate.setTumorLodFStarForward(
                    t2.calculateAltVsRefLOD(forwardPileup, (byte) altAllele, f2forward, 0.0));

            ReadBackedPileup reversePileup = filterReads(ref, tumorReadPile.finalPileupNegativeStrand,
                    true).finalPileupNegativeStrand;
            double f2reverse = LocusReadPile.estimateAlleleFraction(reversePileup, upRef, altAllele);
            candidate.setTumorLodFStarReverse(
                    t2.calculateAltVsRefLOD(reversePileup, (byte) altAllele, f2reverse, 0.0));

            // calculate strand bias power
            candidate.setPowerToDetectPositiveStrandArtifact(strandArtifactPowerCalculator
                    .cachingPowerCalculation(reversePileup.depthOfCoverage(), candidate.getTumorF()));
            candidate.setPowerToDetectNegativeStrandArtifact(strandArtifactPowerCalculator
                    .cachingPowerCalculation(forwardPileup.depthOfCoverage(), candidate.getTumorF()));

            candidate.setStrandContingencyTable(SequenceUtils.getStrandContingencyTable(forwardPileup,
                    reversePileup, (byte) upRef, (byte) altAllele));

            ArrayList<PileupElement> mutantPileupElements = new ArrayList<PileupElement>();
            ArrayList<PileupElement> referencePileupElements = new ArrayList<PileupElement>();

            for (PileupElement p : t2.finalPileup) {
                final SAMRecord read = p.getRead();
                final int offset = p.getOffset();

                if (read.getReadString().charAt(offset) == altAllele) {
                    mutantPileupElements.add(p);
                } else if (read.getReadString().charAt(offset) == upRef) {
                    referencePileupElements.add(p);
                } else {
                    // just drop the read...
                }
            }

            ReadBackedPileup mutantPileup = new ReadBackedPileupImpl(rawContext.getLocation(),
                    mutantPileupElements);

            ReadBackedPileup referencePileup = new ReadBackedPileupImpl(rawContext.getLocation(),
                    referencePileupElements);

            // TODO: shouldn't this be refAllele here?
            final LocusReadPile mutantPile = new LocusReadPile(mutantPileup, altAllele, 0, 0,
                    MTAC.ENABLE_QSCORE_OUTPUT);
            final LocusReadPile refPile = new LocusReadPile(referencePileup, altAllele, 0, 0,
                    MTAC.ENABLE_QSCORE_OUTPUT);

            // Set the maximum observed mapping quality score for the reference and alternate alleles
            int[] rmq = referencePileup.getMappingQuals();
            candidate.setTumorRefMaxMapQ((rmq.length == 0) ? 0 : NumberUtils.max(rmq));

            int[] amq = mutantPileup.getMappingQuals();
            candidate.setTumorAltMaxMapQ((amq.length == 0) ? 0 : NumberUtils.max(amq));

            // start with just the tumor pile
            candidate.setTumorAltForwardOffsetsInRead(SequenceUtils.getForwardOffsetsInRead(mutantPileup));
            candidate.setTumorAltReverseOffsetsInRead(SequenceUtils.getReverseOffsetsInRead(mutantPileup));

            if (candidate.getTumorAltForwardOffsetsInRead().size() > 0) {
                double[] offsets = MuTectStats
                        .convertIntegersToDoubles(candidate.getTumorAltForwardOffsetsInRead());
                double median = MuTectStats.getMedian(offsets);
                candidate.setTumorForwardOffsetsInReadMedian(median);
                candidate.setTumorForwardOffsetsInReadMad(MuTectStats.calculateMAD(offsets, median));
            }

            if (candidate.getTumorAltReverseOffsetsInRead().size() > 0) {
                double[] offsets = MuTectStats
                        .convertIntegersToDoubles(candidate.getTumorAltReverseOffsetsInRead());
                double median = MuTectStats.getMedian(offsets);
                candidate.setTumorReverseOffsetsInReadMedian(median);
                candidate.setTumorReverseOffsetsInReadMad(MuTectStats.calculateMAD(offsets, median));
            }

            // test to see if the candidate should be rejected
            performRejection(candidate);

            if (MTAC.FORCE_ALLELES) {
                out.println(callStatsGenerator.generateCallStats(candidate));
            } else {
                messageByTumorLod.put(candidate.getInitialTumorLod(), candidate);
            }
        }

        // if more than one site passes the tumor LOD threshold for KEEP, fail them all with the triallelic_site filter
        int passingCandidates = 0;
        for (CandidateMutation c : messageByTumorLod.values()) {
            if (c.getTumorLodFStar() >= MTAC.TUMOR_LOD_THRESHOLD) {
                passingCandidates++;
            }
        }

        if (passingCandidates > 1) {
            for (CandidateMutation c : messageByTumorLod.values()) {
                c.addRejectionReason("triallelic_site");
            }
        }

        // write out the call stats for the "best" candidate
        if (!messageByTumorLod.isEmpty()) {
            CandidateMutation m = messageByTumorLod.lastEntry().getValue();

            // only output passing calls OR rejected sites if ONLY_PASSING_CALLS is not specified
            if (!m.isRejected() || (m.isRejected() && !MTAC.ONLY_PASSING_CALLS)) {

                out.println(callStatsGenerator.generateCallStats(m));
                if (vcf != null) {
                    vcf.add(VCFGenerator.generateVC(m));
                }
            }
        }

        return -1;
    } catch (Throwable t) {
        System.err.println("Error processing " + rawContext.getContig() + ":" + rawContext.getPosition());
        t.printStackTrace(System.err);

        throw new RuntimeException(t);
    }
}
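
The throughput logging at the top of this map() is a reusable pattern in its own right: accumulate a per-bin counter and emit one log line per million reads together with the elapsed wall-clock time. Below is a minimal sketch of the same bookkeeping, using java.util.logging in place of the walker's GATK logger (the field and method names here are illustrative, not from the original source):

import java.util.logging.Logger;

public class ThroughputLoggerSketch {
    private static final Logger logger = Logger.getLogger("MUTECT");
    private static final int BIN_SIZE = 1000000;

    private long binReadsProcessed = 0;
    private long totalReadsProcessed = 0;
    private long lastTime = System.currentTimeMillis();

    // Called once per locus with the pileup depth, mirroring the walker's bookkeeping.
    void recordReads(int numberOfReads) {
        binReadsProcessed += numberOfReads;
        if (binReadsProcessed >= BIN_SIZE) {
            long now = System.currentTimeMillis();
            long elapsedTime = now - lastTime;
            lastTime = now;
            totalReadsProcessed += binReadsProcessed;
            binReadsProcessed = 0;
            logger.info(String.format("[MUTECT] Processed %d reads in %d ms", totalReadsProcessed,
                    elapsedTime));
        }
    }

    public static void main(String[] args) {
        ThroughputLoggerSketch sketch = new ThroughputLoggerSketch();
        for (int i = 0; i < 25; i++) {
            sketch.recordReads(100000); // logs once per million reads processed
        }
    }
}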