Example usage for java.util Collection remove

List of usage examples for java.util Collection remove

Introduction

On this page you can find example usages of java.util Collection remove.

Prototype

boolean remove(Object o);

Document

Removes a single instance of the specified element from this collection, if it is present (optional operation).
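
A minimal sketch of this contract (a standalone example; the class name is hypothetical): remove deletes at most one matching element and reports via its return value whether the collection changed.

import java.util.ArrayList;
import java.util.Collection;

public class RemoveContractDemo {
    public static void main(String[] args) {
        Collection<String> letters = new ArrayList<>();
        letters.add("a");
        letters.add("b");
        letters.add("a");

        // Removes a single instance only; the second "a" stays.
        boolean changed = letters.remove("a");
        System.out.println(changed); // true
        System.out.println(letters); // [b, a]

        // Returns false (and leaves the collection unchanged) when no match exists.
        System.out.println(letters.remove("z")); // false
    }
}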

Usage

From source file: org.apache.hadoop.hdfs.server.namenode.FSNamesystem.java

/**
 * We want "replication" replicas for the block, but we now have too many.
 * In this method, copy enough nodes from 'srcNodes' into 'dstNodes' such that:
 *
 * srcNodes.size() - dstNodes.size() == replication
 *
 * We pick nodes so that replicas stay spread across racks, and among the
 * candidates we try hard to pick the one with the least free space.
 * The algorithm first picks a node with the least free space from the nodes
 * on a rack holding more than one replica of the block,
 * so removing such a replica won't remove a rack.
 * If no such node is available,
 * then it picks the node with the least free space overall.
 */
void chooseExcessReplicates(Collection<DatanodeDescriptor> nonExcess, Block b, short replication,
        DatanodeDescriptor addedNode, DatanodeDescriptor delNodeHint) {
    // first form a rack-to-datanodes map
    HashMap<String, ArrayList<DatanodeDescriptor>> rackMap = new HashMap<String, ArrayList<DatanodeDescriptor>>();
    for (Iterator<DatanodeDescriptor> iter = nonExcess.iterator(); iter.hasNext();) {
        DatanodeDescriptor node = iter.next();
        String rackName = node.getNetworkLocation();
        ArrayList<DatanodeDescriptor> datanodeList = rackMap.get(rackName);
        if (datanodeList == null) {
            datanodeList = new ArrayList<DatanodeDescriptor>();
        }
        datanodeList.add(node);
        rackMap.put(rackName, datanodeList);
    }

    // split nodes into two sets
    // priSet contains nodes on rack with more than one replica
    // remains contains the remaining nodes
    ArrayList<DatanodeDescriptor> priSet = new ArrayList<DatanodeDescriptor>();
    ArrayList<DatanodeDescriptor> remains = new ArrayList<DatanodeDescriptor>();
    for (Iterator<Entry<String, ArrayList<DatanodeDescriptor>>> iter = rackMap.entrySet().iterator(); iter
            .hasNext();) {
        Entry<String, ArrayList<DatanodeDescriptor>> rackEntry = iter.next();
        ArrayList<DatanodeDescriptor> datanodeList = rackEntry.getValue();
        if (datanodeList.size() == 1) {
            remains.add(datanodeList.get(0));
        } else {
            priSet.addAll(datanodeList);
        }
    }

    // pick one node to delete that favors the delete hint
    // otherwise pick one with least space from priSet if it is not empty
    // otherwise one node with least space from remains
    boolean firstOne = true;
    while (nonExcess.size() - replication > 0) {
        DatanodeInfo cur = null;
        long minSpace = Long.MAX_VALUE;

        // check if we can delete delNodeHint
        if (firstOne && delNodeHint != null && nonExcess.contains(delNodeHint)
                && (priSet.contains(delNodeHint) || (addedNode != null && !priSet.contains(addedNode)))) {
            cur = delNodeHint;
        } else { // regular excessive replica removal
            Iterator<DatanodeDescriptor> iter = priSet.isEmpty() ? remains.iterator() : priSet.iterator();
            while (iter.hasNext()) {
                DatanodeDescriptor node = iter.next();
                long free = node.getRemaining();

                if (minSpace > free) {
                    minSpace = free;
                    cur = node;
                }
            }
        }

        firstOne = false;
        // adjust rackmap, priSet, and remains
        String rack = cur.getNetworkLocation();
        ArrayList<DatanodeDescriptor> datanodes = rackMap.get(rack);
        datanodes.remove(cur);
        if (datanodes.isEmpty()) {
            rackMap.remove(rack);
        }
        if (priSet.remove(cur)) {
            if (datanodes.size() == 1) {
                priSet.remove(datanodes.get(0));
                remains.add(datanodes.get(0));
            }
        } else {
            remains.remove(cur);
        }

        nonExcess.remove(cur);

        Collection<Block> excessBlocks = excessReplicateMap.get(cur.getStorageID());
        if (excessBlocks == null) {
            excessBlocks = new TreeSet<Block>();
            excessReplicateMap.put(cur.getStorageID(), excessBlocks);
        }
        if (excessBlocks.add(b)) {
            excessBlocksCount++;
            NameNode.stateChangeLog.debug("BLOCK* NameSystem.chooseExcessReplicates: " + "(" + cur.getName()
                    + ", " + b + ") is added to excessReplicateMap");
        }

        //
        // The 'excessBlocks' set tracks blocks until we get confirmation
        // that the datanode has deleted them; the only way we remove them
        // is when we get a "removeBlock" message.  
        //
        // The 'invalidate' list is used to inform the datanode the block 
        // should be deleted.  Items are removed from the invalidate list
        // upon giving instructions to the datanode.
        //
        addToInvalidatesNoLog(b, cur);
        NameNode.stateChangeLog.info("BLOCK* NameSystem.chooseExcessReplicates: " + "(" + cur.getName() + ", "
                + b + ") is added to recentInvalidateSets");
    }
}
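
The get/null-check/put sequence that builds rackMap at the top of this method is a common grouping idiom; on Java 8+ the same map can be built with Map.computeIfAbsent. A sketch, with a minimal Node class standing in for DatanodeDescriptor:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class RackGroupingSketch {
    // Minimal stand-in for DatanodeDescriptor.
    static class Node {
        final String rack;
        Node(String rack) { this.rack = rack; }
    }

    public static void main(String[] args) {
        List<Node> nonExcess = List.of(new Node("/rack1"), new Node("/rack1"), new Node("/rack2"));

        Map<String, List<Node>> rackMap = new HashMap<>();
        for (Node node : nonExcess) {
            // Creates the list on first sight of a rack, then appends to it.
            rackMap.computeIfAbsent(node.rack, k -> new ArrayList<>()).add(node);
        }
        System.out.println(rackMap.get("/rack1").size()); // 2
    }
}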

From source file: org.apache.maven.model.building.DefaultModelBuilder.java

private void importDependencyManagement(Model model, String scope, ModelBuildingRequest request,
        DefaultModelProblemCollector problems, Collection<String> importIds) {
    DependencyManagement depMngt = model.getDependencyManagement();

    if (depMngt == null) {
        return;
    }

    problems.setSource(model);

    String importing = model.getGroupId() + ':' + model.getArtifactId() + ':' + model.getVersion();

    importIds.add(importing);

    final WorkspaceModelResolver workspaceResolver = request.getWorkspaceModelResolver();
    final ModelResolver modelResolver = request.getModelResolver();

    ModelBuildingRequest importRequest = null;

    List<DependencyManagement> importMngts = null;

    for (Iterator<Dependency> it = depMngt.getDependencies().iterator(); it.hasNext();) {
        Dependency dependency = it.next();

        if (!"pom".equals(dependency.getType()) || !scope.equals(dependency.getScope())) {
            continue;
        }

        it.remove();

        String groupId = dependency.getGroupId();
        String artifactId = dependency.getArtifactId();
        String version = dependency.getVersion();

        if (groupId == null || groupId.length() <= 0) {
            problems.add(new ModelProblemCollectorRequest(Severity.ERROR, Version.BASE)
                    .setMessage("'dependencyManagement.dependencies.dependency.groupId' for "
                            + dependency.getManagementKey() + " is missing.")
                    .setLocation(dependency.getLocation("")));
            continue;
        }
        if (artifactId == null || artifactId.length() <= 0) {
            problems.add(new ModelProblemCollectorRequest(Severity.ERROR, Version.BASE)
                    .setMessage("'dependencyManagement.dependencies.dependency.artifactId' for "
                            + dependency.getManagementKey() + " is missing.")
                    .setLocation(dependency.getLocation("")));
            continue;
        }
        if (version == null || version.length() <= 0) {
            problems.add(new ModelProblemCollectorRequest(Severity.ERROR, Version.BASE)
                    .setMessage("'dependencyManagement.dependencies.dependency.version' for "
                            + dependency.getManagementKey() + " is missing.")
                    .setLocation(dependency.getLocation("")));
            continue;
        }

        String imported = groupId + ':' + artifactId + ':' + version;

        if (importIds.contains(imported)) {
            String message = "The dependencies of type=pom and scope=" + scope + " form a cycle: ";
            for (String modelId : importIds) {
                message += modelId + " -> ";
            }
            message += imported;
            problems.add(new ModelProblemCollectorRequest(Severity.ERROR, Version.BASE).setMessage(message));
            continue;
        }

        DependencyManagement importMngt = getCache(request.getModelCache(), groupId, artifactId, version,
                ModelCacheTag.IMPORT);

        if (importMngt == null) {
            if (workspaceResolver == null && modelResolver == null) {
                throw new NullPointerException(String.format(
                        "request.workspaceModelResolver and request.modelResolver cannot be null"
                                + " (parent POM %s and POM %s)",
                        ModelProblemUtils.toId(groupId, artifactId, version),
                        ModelProblemUtils.toSourceHint(model)));
            }

            Model importModel = null;
            if (workspaceResolver != null) {
                try {
                    importModel = workspaceResolver.resolveEffectiveModel(groupId, artifactId, version);
                } catch (UnresolvableModelException e) {
                    problems.add(new ModelProblemCollectorRequest(Severity.FATAL, Version.BASE)
                            .setMessage(e.getMessage().toString()).setException(e));
                    continue;
                }
            }

            // no workspace resolver or workspace resolver returned null (i.e. model not in workspace)
            if (importModel == null) {
                final ModelSource importSource;
                try {
                    dependency = dependency.clone();
                    importSource = modelResolver.resolveModel(dependency);
                    final String resolvedId = dependency.getGroupId() + ':' + dependency.getArtifactId() + ':'
                            + dependency.getVersion();

                    if (!imported.equals(resolvedId) && importIds.contains(resolvedId)) {
                        // A version range has been resolved to a cycle.
                        String message = "The dependencies of type=pom and scope=" + scope + " form a cycle: ";
                        for (String modelId : importIds) {
                            message += modelId + " -> ";
                        }
                        message += resolvedId;
                        problems.add(new ModelProblemCollectorRequest(Severity.ERROR, Version.BASE)
                                .setMessage(message));

                        continue;
                    }
                } catch (UnresolvableModelException e) {
                    StringBuilder buffer = new StringBuilder(256);
                    buffer.append("Non-resolvable " + scope + " POM");
                    if (!containsCoordinates(e.getMessage(), groupId, artifactId, version)) {
                        buffer.append(' ').append(ModelProblemUtils.toId(groupId, artifactId, version));
                    }
                    buffer.append(": ").append(e.getMessage());

                    problems.add(new ModelProblemCollectorRequest(Severity.ERROR, Version.BASE)
                            .setMessage(buffer.toString()).setLocation(dependency.getLocation(""))
                            .setException(e));
                    continue;
                }

                if (importRequest == null) {
                    importRequest = new DefaultModelBuildingRequest();
                    importRequest.setValidationLevel(ModelBuildingRequest.VALIDATION_LEVEL_MINIMAL);
                    importRequest.setModelCache(request.getModelCache());
                    importRequest.setSystemProperties(request.getSystemProperties());
                    importRequest.setUserProperties(request.getUserProperties());
                    importRequest.setLocationTracking(request.isLocationTracking());
                }

                importRequest.setModelSource(importSource);
                importRequest.setModelResolver(modelResolver.newCopy());

                final ModelBuildingResult importResult;
                try {
                    importResult = build(importRequest);
                } catch (ModelBuildingException e) {
                    problems.addAll(e.getProblems());
                    continue;
                }

                problems.addAll(importResult.getProblems());

                importModel = importResult.getEffectiveModel();
            }

            importMngt = importModel.getDependencyManagement();

            if (importMngt == null) {
                importMngt = new DependencyManagement();
            }

            putCache(request.getModelCache(), groupId, artifactId, version, ModelCacheTag.IMPORT, importMngt);
        }

        if (importMngts == null) {
            importMngts = new ArrayList<>();
        }

        importMngts.add(importMngt);
    }

    importIds.remove(importing);

    dependencyManagementImporter.importManagement(model, importMngts, request, problems);
}
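
Note that the loop above removes imported dependencies through it.remove() rather than through the collection itself: mutating a collection directly while iterating over it typically fails fast with a ConcurrentModificationException. A minimal sketch of the difference (hypothetical data):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class IteratorRemoveSketch {
    public static void main(String[] args) {
        List<String> types = new ArrayList<>(List.of("pom", "jar", "pom"));

        // Safe: remove through the live iterator.
        for (Iterator<String> it = types.iterator(); it.hasNext();) {
            if ("pom".equals(it.next())) {
                it.remove();
            }
        }
        System.out.println(types); // [jar]

        // Unsafe: calling types.remove(...) inside a for-each loop over
        // 'types' would generally throw ConcurrentModificationException.
    }
}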

From source file: com.nextep.designer.sqlgen.generic.impl.JDBCCapturer.java

/**
 * Returns a <code>Collection</code> of the foreign keys of the specified
 * table present in the data source pointed to by the connection object
 * provided by the specified <code>context</code> and notifies the specified
 * <code>monitor</code> while capturing.
 *
 * @param context
 *            a {@link ICaptureContext} to store the captured objects
 * @param monitor
 *            the {@link IProgressMonitor} to notify while capturing objects
 * @param allTables
 *            a <code>Map</code> of all tables previously captured
 * @param allTablesColumns
 *            a <code>Map</code> of all columns previously captured
 * @param table
 *            the {@link IBasicTable} for which foreign keys must be
 *            captured
 * @return a {@link Collection} of {@link ForeignKeyConstraint} objects if
 *         the specified table has foreign keys, an empty
 *         <code>Collection</code> otherwise
 */
private Collection<ForeignKeyConstraint> getTableForeignKeys(ICaptureContext context, IProgressMonitor monitor,
        Map<String, IBasicTable> allTables, Map<String, IBasicColumn> allTablesColumns, IBasicTable table) {
    Collection<ForeignKeyConstraint> foreignKeys = new ArrayList<ForeignKeyConstraint>();
    IFormatter formatter = getConnectionVendor(context).getNameFormatter();

    final String tableName = table.getName();
    try {
        final DatabaseMetaData md = ((Connection) context.getConnectionObject()).getMetaData();

        ResultSet rset = null;
        if (md != null) {
            rset = md.getImportedKeys(getObjectOrContextCatalog(context, table),
                    getObjectOrContextSchema(context, table), tableName);
            CaptureHelper.updateMonitor(monitor, getCounter(), 1, 1);
        }

        if (rset != null) {
            ForeignKeyConstraint currFk = null;
            String currFkName = null;
            boolean keyIsValid = false;

            try {
                while (rset.next()) {
                    final String fkName = rset.getString(COLUMN_NAME_FK_NAME);
                    final String fkColumnName = rset.getString(COLUMN_NAME_FKCOLUMN_NAME);
                    final String pkTableName = rset.getString(COLUMN_NAME_PKTABLE_NAME);
                    final String pkName = rset.getString(COLUMN_NAME_PK_NAME);
                    final short onUpdateRule = rset.getShort(COLUMN_NAME_UPDATE_RULE);
                    final short onDeleteRule = rset.getShort(COLUMN_NAME_DELETE_RULE);
                    final short deferrability = rset.getShort(COLUMN_NAME_DEFERRABILITY);

                    if (fkName != null && !"".equals(fkName.trim())) { //$NON-NLS-1$
                        if (LOGGER.isDebugEnabled()) {
                            String logPrefix = "[" + tableName + "][" + fkName + "]"; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
                            LOGGER.debug("= " + logPrefix + " Foreign Key Metadata ="); //$NON-NLS-1$ //$NON-NLS-2$
                            LOGGER.debug(logPrefix + "[" + COLUMN_NAME_FKCOLUMN_NAME + "] " //$NON-NLS-1$ //$NON-NLS-2$
                                    + fkColumnName);
                            LOGGER.debug(logPrefix + "[" + COLUMN_NAME_PKTABLE_NAME + "] " //$NON-NLS-1$ //$NON-NLS-2$
                                    + pkTableName);
                            LOGGER.debug(logPrefix + "[" + COLUMN_NAME_PK_NAME + "] " + pkName); //$NON-NLS-1$ //$NON-NLS-2$
                            LOGGER.debug(logPrefix + "[" + COLUMN_NAME_UPDATE_RULE + "] " //$NON-NLS-1$ //$NON-NLS-2$
                                    + onUpdateRule);
                            LOGGER.debug(logPrefix + "[" + COLUMN_NAME_DELETE_RULE + "] " //$NON-NLS-1$ //$NON-NLS-2$
                                    + onDeleteRule);
                            LOGGER.debug(logPrefix + "[" + COLUMN_NAME_DEFERRABILITY + "] " //$NON-NLS-1$ //$NON-NLS-2$
                                    + deferrability);
                        }

                        if (null == currFkName || !currFkName.equals(fkName) || keyIsValid) {
                            currFkName = fkName;
                            final String formatFkName = formatter.format(fkName);
                            final String formatFkColumnName = formatter.format(fkColumnName);

                            /*
                             * We need to check for each foreign key's
                             * column that the referenced table exists in
                             * the current context because some columns
                             * might be pointing to a synonym.
                             */
                            final String formatPkTableName = formatter.format(pkTableName);
                            IBasicTable pkTable = allTables.get(formatPkTableName);

                            if (pkTable != null) {

                                if (null == currFk || !formatFkName.equals(currFk.getName())) {
                                    final IKeyConstraint refPk = DBGMHelper.getPrimaryKey(pkTable);

                                    if (refPk != null) {
                                        /*
                                         * FIXME [BGA]: The
                                         * TypedObjectFactory does not work
                                         * as UniqueKeyConstraint and
                                         * ForeignKeyConstraint classes have
                                         * the same super interface
                                         * IKeyConstraint. We use an
                                         * explicit constructor instead.
                                         */
                                        // currFk = typedObjFactory
                                        // .create(ForeignKeyConstraint.class);
                                        // currFk.setName(formatFkName);
                                        // currFk.setConstrainedTable(pkTable);
                                        currFk = new ForeignKeyConstraint(formatFkName, "", //$NON-NLS-1$
                                                pkTable);

                                        currFk.setRemoteConstraint(refPk);
                                        currFk.setOnUpdateAction(
                                                CaptureHelper.getForeignKeyAction(onUpdateRule));
                                        currFk.setOnDeleteAction(
                                                CaptureHelper.getForeignKeyAction(onDeleteRule));
                                        foreignKeys.add(currFk);
                                        keyIsValid = true;
                                    } else {
                                        LOGGER.warn("Foreign key [" + formatFkName
                                                + "] has been ignored during import because the referenced primary key ["
                                                + formatPkTableName + "[" //$NON-NLS-1$
                                                + formatter.format(pkName)
                                                + "]] could not be found in the current workspace");
                                        keyIsValid = false;
                                        continue;
                                    }
                                }

                                final IBasicColumn column = allTablesColumns
                                        .get(CaptureHelper.getUniqueObjectName(tableName, formatFkColumnName));
                                if (column != null) {
                                    /*
                                     * Columns are ordered by PKTABLE_NAME,
                                     * KEY_SEQ in the returned ResultSet, so
                                     * we don't have to specify the position
                                     * of the constrained column when adding
                                     * it to the foreign key constraint.
                                     */
                                    currFk.addColumn(column);
                                } else {
                                    LOGGER.warn("Foreign key [" + formatFkName
                                            + "] has been ignored during import because the referencing column ["
                                            + tableName + "[" + formatFkColumnName //$NON-NLS-1$
                                            + "]] could not be found in the current workspace");
                                    keyIsValid = false;

                                    /*
                                     * Now the foreign key is invalid, we
                                     * remove it from the foreign keys list
                                     * that will be returned to the caller
                                     * of this method.
                                     */
                                    foreignKeys.remove(currFk);
                                }
                            } else {
                                if (LOGGER.isDebugEnabled()) {
                                    LOGGER.debug("Foreign key column [" + formatFkName + "[" //$NON-NLS-2$
                                            + formatFkColumnName
                                            + "]] has been ignored during import because the referenced table ["
                                            + formatPkTableName
                                            + "] could not be found in the current workspace");
                                }
                            }
                        }
                    }
                }
            } finally {
                CaptureHelper.safeClose(rset, null);
            }
        }
    } catch (SQLException sqle) {
        LOGGER.error("Unable to fetch foreign keys for table [" + tableName + "] from "
                + getConnectionVendorName(context) + " server: " + sqle.getMessage(), sqle);
    }

    return foreignKeys;
}
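
This capturer adds each foreign key to the result optimistically and calls foreignKeys.remove(currFk) when a referencing column later turns out to be missing. Since Collection.remove matches by equals(), removing the previously added instance always succeeds. A sketch of the add-then-roll-back pattern, with a hypothetical Key record in place of ForeignKeyConstraint:

import java.util.ArrayList;
import java.util.Collection;

public class RollbackSketch {
    record Key(String name) {}

    public static void main(String[] args) {
        Collection<Key> keys = new ArrayList<>();
        Key fk = new Key("FK_ORDERS_CUSTOMERS");

        keys.add(fk);           // add optimistically
        boolean valid = false;  // ...validation later fails...
        if (!valid) {
            keys.remove(fk);    // roll the earlier add back
        }
        System.out.println(keys.isEmpty()); // true
    }
}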

From source file: org.biomart.configurator.controller.MartController.java

/**
 * @param ss the source schema whose tables are to be synchronised
 * @param fksToBeDropped the foreign keys to update as we go along; by the end of
 *            the method, the only keys left in this list should be ones that no
 *            longer exist in the database and may be dropped
 * @param dmd the database metadata to read the keys from
 * @param schema the schema to search for keys in
 * @param catalog the catalog to search for keys in
 * @throws SQLException
 * @throws DataModelException
 */
public void synchroniseKeysUsingDMD(final SourceSchema ss, final Collection<ForeignKey> fksToBeDropped,
        final DatabaseMetaData dmd, final String schema, final String catalog)
        throws SQLException, DataModelException {
    Log.debug("Running DMD key synchronisation");
    // Loop through all the tables in the database, which is the same
    // as looping through all the primary keys.
    Log.debug("Finding tables");
    for (final Iterator<Table> i = ss.getTables().iterator(); i.hasNext();) {

        // Obtain the table and its primary key.
        final SourceTable pkTable = (SourceTable) i.next();
        final PrimaryKey pk = pkTable.getPrimaryKey();
        // Skip all tables which have no primary key.
        if (pk == null)
            continue;

        Log.debug("Processing primary key " + pk);

        // Make a list of relations that already exist in this schema,
        // from some previous run. Any relations that are left in this
        // list by the end of the loop for this table no longer exist in
        // the database, and will be dropped.
        final Collection<Relation> relationsToBeDropped = new TreeSet<Relation>(pk.getRelations()); // Tree for
                                                                                                    // order

        // Identify all foreign keys in the database metadata that refer
        // to the current primary key.
        Log.debug("Finding referring foreign keys");
        String searchCatalog = catalog;
        String searchSchema = schema;
        final ResultSet dbTblFKCols = dmd.getExportedKeys(searchCatalog, searchSchema, pkTable.getName());

        // Loop through the results. There will be one result row per
        // column per key, so we need to build up a set of key columns
        // in a map.
        // The map keys represent the column position within a key. Each
        // map value is a list of columns. In essence the map is a 2-D
        // representation of the foreign keys which refer to this PK,
        // with the keys of the map (Y-axis) representing the column
        // position in the FK, and the values of the map (X-axis)
        // representing each individual FK. In all cases, FK columns are
        // assumed to be in the same order as the PK columns. The map is
        // sorted by key column position.
        // An assumption is made that the query will return columns from
        // the FK in the same order as all other FKs, ie. all column 1s
        // will be returned before any 2s, and then all 2s will be
        // returned
        // in the same order as the 1s they are associated with, etc.
        final TreeMap<Short, List<Column>> dbFKs = new TreeMap<Short, List<Column>>();
        while (dbTblFKCols.next()) {
            final String fkTblName = dbTblFKCols.getString("FKTABLE_NAME");
            final String fkColName = dbTblFKCols.getString("FKCOLUMN_NAME");
            final Short fkColSeq = new Short(dbTblFKCols.getShort("KEY_SEQ"));
            if (fkTblName != null && fkTblName.contains("$")) { // exclude ORACLE's temporary tables (unlikely to be
                                                                // found here though)
                continue;
            }

            // Note the column.
            if (!dbFKs.containsKey(fkColSeq))
                dbFKs.put(fkColSeq, new ArrayList<Column>());
            // In some dbs, FKs can be invalid, so we need to check
            // them.
            final Table fkTbl = ss.getTableByName(fkTblName);
            if (fkTbl != null) {
                final Column fkCol = (Column) fkTbl.getColumnByName(fkColName);
                if (fkCol != null)
                    (dbFKs.get(fkColSeq)).add(fkCol);
            }
        }
        dbTblFKCols.close();

        // Sort foreign keys by name (case insensitive)
        for (List<Column> columnList : dbFKs.values()) {
            Collections.sort(columnList);
        }

        // Only construct FKs if we actually found any.
        if (!dbFKs.isEmpty()) {
            // Identify the sequence of the first column, which may be 0
            // or 1, depending on database implementation.
            final int firstColSeq = ((Short) dbFKs.firstKey()).intValue();

            // How many columns are in the PK?
            final int pkColCount = pkTable.getPrimaryKey().getColumns().size();

            // How many FKs do we have?
            final int fkCount = dbFKs.get(dbFKs.firstKey()).size();

            // Loop through the FKs, and construct each one at a time.
            for (int j = 0; j < fkCount; j++) {
                // Set up an array to hold the FK columns.
                final List<Column> candidateFKColumns = new ArrayList<Column>();

                // For each FK column name, look up the actual column in
                // the table.
                for (final Iterator<Map.Entry<Short, List<Column>>> k = dbFKs.entrySet().iterator(); k
                        .hasNext();) {
                    final Map.Entry<Short, List<Column>> entry = k.next();
                    final Short keySeq = (Short) entry.getKey();
                    // Convert the db-specific column index to a
                    // 0-indexed figure for the array of fk columns.
                    final int fkColSeq = keySeq.intValue() - firstColSeq;
                    candidateFKColumns.add((Column) (entry.getValue()).get(j));
                }

                // Create a template foreign key based around the set
                // of candidate columns we found.
                ForeignKey fkObject;
                try {
                    List<Column> columns = new ArrayList<Column>();
                    for (int k = 0; k < candidateFKColumns.size(); k++) {
                        columns.add(candidateFKColumns.get(k));
                    }
                    fkObject = new ForeignKey(columns);
                    // new KeyController(fkObject);
                } catch (final Throwable t) {
                    throw new BioMartError(t);
                }
                final Table fkTable = fkObject.getTable();

                // If any FK already exists on the target table with the
                // same columns in the same order, then reuse it.
                boolean fkAlreadyExists = false;
                for (final Iterator<ForeignKey> f = fkTable.getForeignKeys().iterator(); f.hasNext()
                        && !fkAlreadyExists;) {
                    final ForeignKey candidateFK = f.next();
                    if (candidateFK.equals(fkObject)) {
                        // Found one. Reuse it!
                        fkObject = candidateFK;
                        // Update the status to indicate that the FK is
                        // backed by the database, if previously it was
                        // handmade.
                        if (fkObject.getStatus().equals(ComponentStatus.HANDMADE))
                            fkObject.setStatus(ComponentStatus.INFERRED);
                        // Remove the FK from the list to be dropped
                        // later, as it definitely exists now.
                        fksToBeDropped.remove(candidateFK);
                        // Flag the key as existing.
                        fkAlreadyExists = true;
                    }
                }

                // Has the key been reused, or is it a new one?
                if (!fkAlreadyExists)
                    try {
                        fkTable.getForeignKeys().add(fkObject);
                        // fkTable.getForeignKeys().add(fk);
                    } catch (final Throwable t) {
                        throw new BioMartError(t);
                    }

                // Work out whether the relation from the FK to
                // the PK should be 1:M or 1:1. The rule is that
                // it will be 1:M in all cases except where the
                // FK table has a PK with identical columns to
                // the FK, in which case it is 1:1, as the FK
                // is unique.
                Cardinality card = Cardinality.MANY_A;
                final PrimaryKey fkPK = fkTable.getPrimaryKey();
                if (fkPK != null && fkObject.getColumns().equals(fkPK.getColumns()))
                    card = Cardinality.ONE;

                // Check to see if it already has a relation.
                boolean relationExists = false;
                for (final Iterator<Relation> f = fkObject.getRelations().iterator(); f.hasNext();) {
                    // Obtain the next relation.
                    final Relation candidateRel = f.next();

                    // a) a relation already exists between the FK
                    // and the PK.
                    if (candidateRel.getOtherKey(fkObject).equals(pk)) {
                        // If cardinality matches, make it
                        // inferred. If doesn't match, make it
                        // modified and update original cardinality.
                        try {
                            if (card.equals(candidateRel.getCardinality())) {
                                if (!candidateRel.getStatus().equals(ComponentStatus.INFERRED_INCORRECT))
                                    candidateRel.setStatus(ComponentStatus.INFERRED);
                            } else {
                                if (!candidateRel.getStatus().equals(ComponentStatus.INFERRED_INCORRECT))
                                    candidateRel.setStatus(ComponentStatus.MODIFIED);
                                candidateRel.setOriginalCardinality(card);
                            }
                        } catch (final AssociationException ae) {
                            throw new BioMartError(ae);
                        }
                        // Don't drop it at the end of the loop.
                        relationsToBeDropped.remove(candidateRel);
                        // Say we've found it.
                        relationExists = true;
                    }

                    // b) a handmade relation exists elsewhere which
                    // should not be dropped. All other relations
                    // elsewhere will be dropped.
                    else if (candidateRel.getStatus().equals(ComponentStatus.HANDMADE))
                        // Don't drop it at the end of the loop.
                        relationsToBeDropped.remove(candidateRel);
                }

                // If relation did not already exist, create it.
                if (!relationExists && !pk.equals(fkObject)) {
                    // Establish the relation.
                    try {
                        new RelationSource(pk, fkObject, card);
                        // pk.getObject().addRelation(relation);
                        // fk.getObject().addRelation(relation);
                    } catch (final Throwable t) {
                        throw new BioMartError(t);
                    }
                }
            }
        }

        // Remove any relations that we didn't find in the database (but
        // leave the handmade ones behind).
        for (final Iterator<Relation> j = relationsToBeDropped.iterator(); j.hasNext();) {
            final Relation r = j.next();
            if (r.getStatus().equals(ComponentStatus.HANDMADE))
                continue;
            r.getFirstKey().removeRelation(r);
            r.getSecondKey().removeRelation(r);
        }
    }
}
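
Both relationsToBeDropped here and fksToBeDropped across this method follow the same bookkeeping: seed a collection with everything known from a previous run, remove each entry as it is re-discovered in the database, and drop whatever is left at the end. A sketch with plain strings standing in for relations:

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class SweepSketch {
    public static void main(String[] args) {
        Set<String> toBeDropped = new HashSet<>(Set.of("relA", "relB", "relC"));
        List<String> foundInDatabase = List.of("relA", "relC");

        // Everything re-discovered in the database is confirmed and kept.
        for (String rel : foundInDatabase) {
            toBeDropped.remove(rel);
        }

        // Whatever remains no longer exists in the database.
        System.out.println(toBeDropped); // [relB]
    }
}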

From source file: org.biomart.configurator.controller.MartController.java

/**
 * This method implements the key-guessing algorithm for foreign keys. Basically, it iterates through all known
 * primary keys, and looks for sets of matching columns in other tables, either with the same names or with '_key'
 * appended. Any matching sets found are assumed to be foreign keys with relations to the current primary key.
 * <p>
 * Relations are 1:M, except when the table at the FK end has a PK with identical columns to the FK. In this case,
 * the FK is forced to be unique, which implies that it can only partake in a 1:1 relation, so the relation is
 * marked as such.
 * 
 * @param fksToBeDropped
 *            the list of foreign keys to update as we go along. By the end of the method, the only keys left in
 *            this list should be ones that no longer exist in the database and may be dropped.
 * @throws SQLException
 *             if there was a problem talking to the database.
 * @throws DataModelException
 *             if there was a logical problem during construction of the set of foreign keys.
 */
public void synchroniseKeysUsingKeyGuessing(final SourceSchema ss, final Collection<ForeignKey> fksToBeDropped)
        throws SQLException, DataModelException {
    Log.debug("Running non-DMD key synchronisation");
    // Loop through all the tables in the database, which is the same
    // as looping through all the primary keys.
    Log.debug("Finding tables");
    for (final Iterator<Table> i = ss.getTables().iterator(); i.hasNext();) {
        // Obtain the table and its primary key.
        final SourceTable pkTable = (SourceTable) i.next();
        final PrimaryKey pk = pkTable.getPrimaryKey();
        // Skip all tables which have no primary key.
        if (pk == null)
            continue;

        Log.debug("Processing primary key " + pk);

        // If an FK exists on the PK table with the same columns as the
        // PK, then we cannot use this PK to make relations to other
        // tables.
        // This is because the FK shows that this table is not the
        // original source of the data in those columns. Some other
        // table is the original source, so we assume that relations
        // will have been established from that other table instead. So,
        // we skip this table.
        boolean pkIsAlsoAnFK = false;
        for (final Iterator<ForeignKey> j = pkTable.getForeignKeys().iterator(); j.hasNext()
                && !pkIsAlsoAnFK;) {
            final ForeignKey fk = j.next();
            if (fk.getColumns().equals(pk.getColumns()))
                pkIsAlsoAnFK = true;
        }
        if (pkIsAlsoAnFK)
            continue;

        // To maintain some degree of sanity here, we assume that a PK
        // is the original source of data (and not a copy of data
        // sourced from some other table) if the first column in the PK
        // has the same name as the table it is in, or with '_id'
        // appended, or is just 'id' on its own. Any PK which does not
        // have this property is skipped.
        final Column firstPKCol = pk.getColumns().get(0);
        String firstPKColName = firstPKCol.getName();
        int idPrefixIndex = firstPKColName.indexOf(Resources.get("primaryKeySuffix"));
        // then try uppercase, in Oracle, names are uppercase
        if (idPrefixIndex < 0)
            idPrefixIndex = firstPKColName.toUpperCase()
                    .indexOf(Resources.get("primaryKeySuffix").toUpperCase());
        if (idPrefixIndex >= 0)
            firstPKColName = firstPKColName.substring(0, idPrefixIndex);
        if (!firstPKColName.equals(pkTable.getName()) && !firstPKColName.equals(Resources.get("idCol")))
            continue;

        // Make a list of relations that already exist in this schema,
        // from some previous run. Any relations that are left in this
        // list by the end of the loop for this table no longer exist in
        // the database, and will be dropped.
        final Collection<Relation> relationsToBeDropped = new TreeSet<Relation>(pk.getRelations()); // Tree for
                                                                                                    // order

        // Now we know that we can use this PK for certain, look for all
        // other tables (other than the one the PK itself belongs to),
        // for sets of columns with identical names, or with '_key'
        // appended. Any set that we find is going to be an FK with a
        // relation back to this PK.
        Log.debug("Searching for possible referring foreign keys");
        for (final Iterator<Table> l = ss.getTables().iterator(); l.hasNext();) {
            // Obtain the next table to look at.
            final SourceTable fkTable = (SourceTable) l.next();

            // Make sure the table is not the same as the PK table.
            if (fkTable.equals(pkTable))
                continue;

            // Set up an empty list for the matching columns.
            final List<Column> candidateFKColumns = new ArrayList<Column>();
            int matchingColumnCount = 0;

            // Iterate through the PK columns and find a column in the
            // target FK table with the same name, or with '_key'
            // appended,
            // or with the PK table name and an underscore prepended.
            // If found, add that target column to the candidate FK
            // column
            // set.
            for (int columnIndex = 0; columnIndex < pk.getColumns().size(); columnIndex++) {
                final String pkColumnName = pk.getColumns().get(columnIndex).getName();
                // Start out by assuming no match.
                Column candidateFKColumn = null;
                // Don't try to find 'id' or 'id_key' columns as that
                // would be silly and would probably match far too much.
                if (!pkColumnName.equals(Resources.get("idCol"))) {
                    // Try equivalent name first.
                    candidateFKColumn = fkTable.getColumnByName(pkColumnName);
                    // Then try with '_key' appended, if not found.
                    if (candidateFKColumn == null)
                        candidateFKColumn = fkTable
                                .getColumnByName(pkColumnName + Resources.get("foreignKeySuffix"));
                }
                // Then try with PK tablename+'_' prepended, if not
                // found.
                if (candidateFKColumn == null)
                    candidateFKColumn = fkTable.getColumnByName(pkTable.getName() + "_" + pkColumnName);
                // Found it? Add it to the candidate list.
                if (candidateFKColumn != null) {
                    candidateFKColumns.add(candidateFKColumn);
                    matchingColumnCount++;
                }
            }

            // We found a matching set, so create a FK on it!
            if (matchingColumnCount == pk.getColumns().size()) {
                // Create a template foreign key based around the set
                // of candidate columns we found.
                ForeignKey fkObject;
                try {
                    List<Column> columns = new ArrayList<Column>();
                    for (int k = 0; k < candidateFKColumns.size(); k++) {
                        columns.add(candidateFKColumns.get(k));
                    }
                    fkObject = new ForeignKey(columns);
                    // new KeyController(fkObject);
                } catch (final Throwable t) {
                    throw new BioMartError(t);
                }

                // If any FK already exists on the target table with the
                // same columns in the same order, then reuse it.
                boolean fkAlreadyExists = false;
                for (final Iterator<ForeignKey> f = fkTable.getForeignKeys().iterator(); f.hasNext()
                        && !fkAlreadyExists;) {
                    final ForeignKey candidateFK = f.next();
                    if (candidateFK.equals(fkObject)) {
                        // Found one. Reuse it!
                        fkObject = candidateFK;
                        // Update the status to indicate that the FK is
                        // backed by the database, if previously it was
                        // handmade.
                        if (fkObject.getStatus().equals(ComponentStatus.HANDMADE))
                            fkObject.setStatus(ComponentStatus.INFERRED);
                        // Remove the FK from the list to be dropped
                        // later, as it definitely exists now.
                        fksToBeDropped.remove(candidateFK);
                        // Flag the key as existing.
                        fkAlreadyExists = true;
                    }
                }

                // Has the key been reused, or is it a new one?
                if (!fkAlreadyExists)
                    try {
                        // fkTable.getForeignKeys().add(fk);
                        fkTable.getForeignKeys().add(fkObject);
                    } catch (final Throwable t) {
                        throw new BioMartError(t);
                    }

                // Work out whether the relation from the FK to
                // the PK should be 1:M or 1:1. The rule is that
                // it will be 1:M in all cases except where the
                // FK table has a PK with identical columns to
                // the FK, in which case it is 1:1, as the FK
                // is unique.
                Cardinality card = Cardinality.MANY_A;
                final PrimaryKey fkPK = fkTable.getPrimaryKey();
                if (fkPK != null && fkObject.getColumns().equals(fkPK.getColumns()))
                    card = Cardinality.ONE;

                // Check to see if it already has a relation.
                boolean relationExists = false;
                for (final Iterator<Relation> f = fkObject.getRelations().iterator(); f.hasNext();) {
                    // Obtain the next relation.
                    final Relation candidateRel = f.next();

                    // a) a relation already exists between the FK
                    // and the PK.
                    if (candidateRel.getOtherKey(fkObject).equals(pk)) {
                        // If cardinality matches, make it
                        // inferred. If doesn't match, make it
                        // modified and update original cardinality.
                        try {
                            if (card.equals(candidateRel.getCardinality())) {
                                if (!candidateRel.getStatus().equals(ComponentStatus.INFERRED_INCORRECT))
                                    candidateRel.setStatus(ComponentStatus.INFERRED);
                            } else {
                                if (!candidateRel.getStatus().equals(ComponentStatus.INFERRED_INCORRECT))
                                    candidateRel.setStatus(ComponentStatus.MODIFIED);
                                candidateRel.setOriginalCardinality(card);
                            }
                        } catch (final AssociationException ae) {
                            throw new BioMartError(ae);
                        }
                        // Don't drop it at the end of the loop.
                        relationsToBeDropped.remove(candidateRel);
                        // Say we've found it.
                        relationExists = true;
                    }

                    // b) a handmade relation exists elsewhere which
                    // should not be dropped. All other relations
                    // elsewhere will be dropped.
                    else if (candidateRel.getStatus().equals(ComponentStatus.HANDMADE))
                        // Don't drop it at the end of the loop.
                        relationsToBeDropped.remove(candidateRel);
                }

                // If relation did not already exist, create it.
                if (!relationExists) {
                    // Establish the relation.
                    try {
                        RelationSource rel = new RelationSource(pk, fkObject, card);
                        // pk.getObject().addRelation(relation);
                        // fk.getObject().addRelation(relation);
                    } catch (final Throwable t) {
                        throw new BioMartError(t);
                    }
                }
            }
        }

        // Remove any relations that we didn't find in the database (but
        // leave the handmade ones behind).
        for (final Iterator<Relation> j = relationsToBeDropped.iterator(); j.hasNext();) {
            final Relation r = j.next();
            if (r.getStatus().equals(ComponentStatus.HANDMADE))
                continue;
            r.getFirstKey().removeRelation(r);
            r.getSecondKey().removeRelation(r);
        }
    }
}
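
The column-name matching that the javadoc describes can be condensed into a small helper. This sketch uses hypothetical table and column names and a literal "_key" suffix where the real code reads Resources.get("foreignKeySuffix"):

import java.util.List;

public class KeyGuessSketch {
    // Try the PK column name as-is, then with "_key" appended, then with the
    // PK table name prepended, mirroring the lookup order in the method above.
    static String guessFkColumn(List<String> fkTableColumns, String pkTable, String pkColumn) {
        for (String candidate : List.of(
                pkColumn,
                pkColumn + "_key",
                pkTable + "_" + pkColumn)) {
            if (fkTableColumns.contains(candidate)) {
                return candidate;
            }
        }
        return null; // no candidate column found: no guessed FK here
    }

    public static void main(String[] args) {
        List<String> orderColumns = List.of("order_id", "customer_id_key", "total");
        System.out.println(guessFkColumn(orderColumns, "customer", "customer_id")); // customer_id_key
    }
}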

From source file: org.nuclos.client.genericobject.GenericObjectCollectController.java

/**
 * Completes the current collectable with the subform values that are set in the search panel.
 */
@Override
protected void newCollectableWithDependantSearchValues() throws NuclosBusinessException {
    Collection<SearchConditionSubFormController> collscsfc = getSubFormControllersInSearch();
    Collection<DetailsSubFormController<CollectableEntityObject>> colldsfc = getSubFormControllersInDetails();
    // iterate over each search subform
    for (SearchConditionSubFormController scsfc : collscsfc)
        // handle only subforms of the first hierarchy
        if (scsfc.getSubForm().getParentSubForm() == null)
            // iterate over each detail subform
            for (DetailsSubFormController<CollectableEntityObject> dsfc : colldsfc) {
                if (dsfc.getEntityAndForeignKeyFieldName().getEntityName()
                        .equals(scsfc.getEntityAndForeignKeyFieldName().getEntityName()))
                    if (dsfc.getSubForm().isEnabled()) {
                        SubForm.SubFormTableModel searchTableModel = scsfc.getSearchConditionTableModel();
                        CollectableTableModel<CollectableEntityObject> detailsTableModel = dsfc
                                .getCollectableTableModel();
                        Collection<CollectableMasterData> newCollectables = new ArrayList<CollectableMasterData>();
                        // iterate over each row found in the search subform
                        for (int iSearchRow = 0; iSearchRow < searchTableModel.getRowCount(); iSearchRow++) {
                            CollectableMasterData clctmd = dsfc.insertNewRow();
                            newCollectables.add(clctmd);
                            // iterate over each column found in the search subform
                            for (int iSearchColumn = 0; iSearchColumn < searchTableModel
                                    .getColumnCount(); iSearchColumn++)
                                // iterate over each corresponding column found in the detail subform
                                for (int columnDetail = 0; columnDetail < detailsTableModel
                                        .getColumnCount(); columnDetail++)
                                    if (searchTableModel.getColumnName(iSearchColumn)
                                            .equals(detailsTableModel.getColumnName(columnDetail))) {
                                        TableCellEditor tce = dsfc.getSubForm().getJTable()
                                                .getCellEditor(iSearchRow, columnDetail);

                                        if (tce instanceof CollectableComponentTableCellEditor) {
                                            boolean bSetAllowed = true;

                                            if (!isSetAllowedForClctComponent(
                                                    ((CollectableComponentTableCellEditor) tce)
                                                            .getCollectableComponent()))
                                                bSetAllowed = false;

                                            if (bSetAllowed) {
                                                Object oClctSearchCondition = searchTableModel
                                                        .getValueAt(iSearchRow, iSearchColumn);
                                                if (oClctSearchCondition != null) {
                                                    String sFieldName = ((AtomicCollectableSearchCondition) oClctSearchCondition)
                                                            .getFieldName();
                                                    Object oSearchValue = null;
                                                    Object oSearchValueId = null;
                                                    if (oClctSearchCondition instanceof CollectableComparison) {
                                                        CollectableField clctField = ((CollectableComparison) oClctSearchCondition)
                                                                .getComparand();
                                                        if (clctField instanceof CollectableValueIdField)
                                                            oSearchValueId = ((CollectableValueIdField) clctField)
                                                                    .getValueId();

                                                        oSearchValue = clctField.getValue();
                                                    } else if (oClctSearchCondition instanceof CollectableLikeCondition)
                                                        oSearchValue = ((CollectableLikeCondition) oClctSearchCondition)
                                                                .getLikeComparand();

                                                    if (oSearchValue != null) {
                                                        if (oSearchValueId != null) {
                                                            clctmd.setField(sFieldName,
                                                                    new CollectableValueIdField(oSearchValueId,
                                                                            oSearchValue));
                                                            if (clctmd.getMasterDataCVO() != null)
                                                                clctmd.getMasterDataCVO().setField(
                                                                        sFieldName + "Id", oSearchValueId);
                                                        } else {
                                                            clctmd.setField(sFieldName,
                                                                    new CollectableValueField(oSearchValue));
                                                            if (clctmd.getMasterDataCVO() != null)
                                                                clctmd.getMasterDataCVO().setField(sFieldName,
                                                                        oSearchValue);
                                                        }
                                                        newCollectables.remove(clctmd);
                                                        detailsChanged(dsfc.getSubForm());
                                                    }
                                                }
                                            }
                                        }
                                    }
                        }
                        for (CollectableMasterData clctmd : newCollectables) {
                            dsfc.getCollectableTableModel().remove(clctmd);
                        }
                    }
            }
}
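
The method tracks every freshly inserted row in newCollectables, removes a row from that tracking list as soon as a search value is copied into it, and finally deletes the rows that stayed empty from the table model. A sketch of that bookkeeping, with a hypothetical Row class:

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

public class EmptyRowCleanupSketch {
    static class Row {
        String value; // stays null if no search value is copied in
    }

    public static void main(String[] args) {
        List<Row> model = new ArrayList<>();
        Collection<Row> newRows = new ArrayList<>();

        for (String searchValue : new String[] { "abc", null }) {
            Row row = new Row();
            model.add(row);
            newRows.add(row);
            if (searchValue != null) {
                row.value = searchValue;
                newRows.remove(row); // received a value: keep this row
            }
        }

        model.removeAll(newRows);    // drop the rows that stayed empty
        System.out.println(model.size()); // 1
    }
}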

From source file: edu.jhuapl.openessence.controller.ReportController.java

private Map<String, Object> createTimeseries(String userPrincipalName, DataSeriesSource dss,
        List<Filter> filters, GroupingImpl group, String timeResolution, Integer prepull,
        String graphTimeSeriesUrl, final Collection<Record> records, final List<Dimension> accumulations,
        final List<Dimension> timeseriesDenominators, String detectorClass, boolean includeDetails,
        boolean displayIntervalEndDate, GraphDataInterface graphData, TimeZone clientTimezone) {

    Map<String, Object> result = new HashMap<String, Object>();
    Map<String, ResolutionHandler> resolutionHandlers = null;
    result.put("success", false);
    try {
        GroupingDimension grpdim = dss.getGroupingDimension(group.getId());
        resolutionHandlers = grpdim.getResolutionsMap();
        String dateFieldName = group.getId();
        Date startDate = null;
        Date endDate = null;
        if (grpdim != null
                && (grpdim.getSqlType() == FieldType.DATE || grpdim.getSqlType() == FieldType.DATE_TIME)) {
            for (Filter f : filters) {
                if (f instanceof OneArgOpFilter) {
                    OneArgOpFilter of = (OneArgOpFilter) f;
                    if (of.getFilterId().equalsIgnoreCase(grpdim.getId())
                            && (of.getSqlSnippet("").contains(">="))) {
                        startDate = (Date) of.getArguments().get(0);
                    } else if (of.getFilterId().equalsIgnoreCase(grpdim.getId())
                            && (of.getSqlSnippet("").contains("<="))) {
                        endDate = (Date) of.getArguments().get(0);
                    }
                }
            }
        }
        //union accumulations to get all results
        List<Dimension> dimensions = new ArrayList<Dimension>(
                ControllerUtils.unionDimensions(accumulations, timeseriesDenominators));

        int timeOffsetMillies = 0;
        String timezoneEnabledString = messageSource.getMessage(TIMEZONE_ENABLED, "false");
        if (timezoneEnabledString.equalsIgnoreCase("true")) {
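            // difference between the client and server standard-time offsets (raw offset minus DST savings)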
            timeOffsetMillies = (clientTimezone.getRawOffset() - clientTimezone.getDSTSavings())
                    - (TimeZone.getDefault().getRawOffset() - TimeZone.getDefault().getDSTSavings());
        }
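        // note: startDate must have been set by a ">=" filter above, otherwise setTime below throws a NullPointerException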
        Calendar startDayCal = Calendar.getInstance(clientTimezone);
        startDayCal.setTime(startDate);
        startDayCal.add(Calendar.MILLISECOND, timeOffsetMillies);

        //get data grouped by group dimension
        List<AccumPoint> points = extractAccumulationPoints(userPrincipalName, dss, records,
                startDayCal.getTime(), endDate, dimensions, group, resolutionHandlers);
        if (points.size() > 0) {
            DateFormat dateFormat = getDateFormat(timeResolution); //dateFormat.setTimeZone(timezone);
            DateFormat tmpDateFormat = (DateFormat) dateFormat.clone();
            tmpDateFormat.setTimeZone(clientTimezone);

            // number format for level
            NumberFormat numFormat3 = NumberFormat.getNumberInstance();
            numFormat3.setMinimumFractionDigits(0);
            numFormat3.setMaximumFractionDigits(3);

            // number format for expected count
            NumberFormat numFormat1 = NumberFormat.getNumberInstance();
            numFormat1.setMinimumFractionDigits(0);
            numFormat1.setMaximumFractionDigits(1);

            Calendar cal = new GregorianCalendar();
            cal.setTime(startDayCal.getTime());
            //offset start date to match prepull offset
            if (WEEKLY.equalsIgnoreCase(timeResolution)) {
                cal.add(Calendar.DATE, 7 * prepull);
            } else if (DAILY.equalsIgnoreCase(timeResolution)) {
                cal.add(Calendar.DATE, prepull);
            }
            Date queryStartDate = cal.getTime();

            //-- Handles Denominator Types -- //
            double[] divisors = new double[points.size()];
            double multiplier = 1.0;
            boolean percentBased = false;
            String yAxisLabel = messageSource.getDataSourceMessage("graph.count", dss);

            boolean isDetectionDetector = !NoDetectorDetector.class.getName().equalsIgnoreCase(detectorClass);

            //if there is a denominator we need to further manipulate the data
            if (timeseriesDenominators != null && !timeseriesDenominators.isEmpty()) {
                // divisor is the sum of timeseriesDenominators
                divisors = totalSeriesValues(points, timeseriesDenominators);
                multiplier = 100.0;
                percentBased = true;
                yAxisLabel = messageSource.getDataSourceMessage("graph.percent", dss);
            } else {
                //the query is for total counts
                Arrays.fill(divisors, 1.0);
            }

            double[][] allCounts = new double[accumulations.size()][];
            int[][] allColors = new int[accumulations.size()][];
            String[][] allAltTexts = new String[accumulations.size()][];
            String[] dates = new String[] { "" };
            double[][] allExpecteds = new double[accumulations.size()][];
            double[][] allLevels = new double[accumulations.size()][];
            String[][] allLineSetURLs = new String[accumulations.size()][];
            String[][] allSwitchInfo = new String[accumulations.size()][];
            String[] lineSetLabels = new String[accumulations.size()];
            boolean[] displayAlerts = new boolean[accumulations.size()];

            //get all results
            Collection<Dimension> dims = new ArrayList<Dimension>(dss.getResultDimensions());
            Collection<String> dimIds = ControllerUtils.getDimensionIdsFromCollection(dims);
            Collection<String> accIds = ControllerUtils.getDimensionIdsFromCollection(dss.getAccumulations());
            //remove extra accumulations in the result set using string ids
            dimIds.removeAll(accIds);

            //for each accumulation we run detection and gather results
            int aIndex = 0;
            for (Dimension accumulation : accumulations) {
                String accumId = accumulation.getId();

                // use display name if it has one, otherwise translate its ID
                String accumIdTranslated = accumulation.getDisplayName();
                if (accumIdTranslated == null) {
                    accumIdTranslated = messageSource.getDataSourceMessage(accumulation.getId(), dss);
                }

                TemporalDetectorInterface TDI = (TemporalDetectorInterface) DetectorHelper
                        .createObject(detectorClass);
                TemporalDetectorSimpleDataObject TDDO = new TemporalDetectorSimpleDataObject();

                int[] colors;
                double[] counts;
                String[] altTexts;
                double[] expecteds;
                double[] levels;
                String[] switchInfo;
                String[] urls;

                //pull the counts from the accum array points
                double[] seriesDoubleArray = generateSeriesValues(points, accumId);

                //apply the divisor (and percent multiplier) before detection
                for (int i = 0; i < seriesDoubleArray.length; i++) {
                    double div = divisors[i];
                    if (div == 0) {
                        seriesDoubleArray[i] = 0.0;
                    } else {
                        seriesDoubleArray[i] = (seriesDoubleArray[i] / div) * multiplier;
                    }
                }

                //run detection
                TDDO.setCounts(seriesDoubleArray);
                TDDO.setStartDate(startDate);
                TDDO.setTimeResolution(timeResolution);

                try {
                    TDI.runDetector(TDDO);
                } catch (Exception e) {
                    String errorMessage = "Failure to create Timeseries";
                    if (e.getMessage() != null) {
                        errorMessage = errorMessage + ":<BR>" + e.getMessage();
                    }
                    result.put("message", errorMessage);
                    result.put("success", false);
                    return result;
                }

                //drop the prepull startup points from the detector output
                TDDO.cropStartup(prepull);
                counts = TDDO.getCounts();
                int tddoLength = counts.length;

                if (!DAILY.equalsIgnoreCase(timeResolution)) {
                    //toggle between start date and end date
                    //TDDO.setDates(getOurDates(startDate, endDate, tddoLength, timeResolution));
                    TDDO.setDates(getOurDates(queryStartDate, endDate, tddoLength, timeResolution,
                            displayIntervalEndDate));
                }
                double[] tcolors = TDDO.getColors();

                Date[] tdates = TDDO.getDates();
                altTexts = TDDO.getAltTexts();
                expecteds = TDDO.getExpecteds();
                levels = TDDO.getLevels();
                switchInfo = TDDO.getSwitchInfo();
                colors = new int[tddoLength];
                dates = new String[tddoLength];
                urls = new String[tddoLength];

                //add the accumId for the current series
                dimIds.add(accumId);

                StringBuilder jsCall = new StringBuilder();
                jsCall.append("javascript:OE.report.datasource.showDetails({");
                jsCall.append("dsId:'").append(dss.getClass().getName()).append("'");
                //specify results
                jsCall.append(",results:[")
                        .append(StringUtils.collectionToDelimitedString(dimIds, ",", "'", "'")).append(']');
                //specify accumId
                jsCall.append(",accumId:'").append(accumId).append("'");

                addJavaScriptFilters(jsCall, filters, dateFieldName);

                //this builds urls and hover texts
                int startDay = getWeekStartDay(resolutionHandlers);

                Calendar c = Calendar.getInstance(clientTimezone);

                //               Calendar curr = Calendar.getInstance();
                for (int i = 0; i < tddoLength; i++) {
                    colors[i] = (int) tcolors[i];

                    // For a time series data point, set time to be current server time
                    // This will allow us to convert this data point date object to be request timezone date
                    c.setTime(tdates[i]);
                    c.add(Calendar.MILLISECOND, timeOffsetMillies);

                    if (timeResolution.equals(WEEKLY)) {
                        dates[i] = dateFormatWeekPart.format(tdates[i]) + "-W"
                                + PgSqlDateHelper.getWeekOfYear(startDay, c) + "-"
                                + PgSqlDateHelper.getYear(startDay, c);
                    } else {
                        dates[i] = tmpDateFormat.format(c.getTime());
                    }

                    altTexts[i] = "(" + accumIdTranslated + ") " + // Accum
                            "Date: " + dates[i] + // Date
                            ", Level: " + numFormat3.format(levels[i]) + // Level
                            ", Count: " + ((int) counts[i]) + // Count
                            ", Expected: " + numFormat1.format(expecteds[i]); // Expected

                    if (switchInfo != null) {
                        altTexts[i] += ", Switch: " + switchInfo[i] + ", ";
                    }

                    // build the click through url
                    StringBuilder tmp = new StringBuilder(jsCall.toString());

                    // add the date field with start and end dates from the data point
                    if (!DAILY.equalsIgnoreCase(timeResolution)) {
                        Calendar timeSet = Calendar.getInstance(clientTimezone);
                        timeSet.setTime(tdates[i]);

                        if (WEEKLY.equalsIgnoreCase(timeResolution)) {
                            timeSet.set(Calendar.DAY_OF_WEEK, startDay + 1);
                            tmp.append(",").append(dateFieldName).append("_start:'")
                                    .append(timeSet.getTimeInMillis()).append("'");
                            timeSet.add(Calendar.DAY_OF_YEAR, 6);
                            tmp.append(",").append(dateFieldName).append("_end:'")
                                    .append(timeSet.getTimeInMillis()).append("'");
                        } else if (MONTHLY.equalsIgnoreCase(timeResolution)) {
                            // Compute last day of month
                            timeSet.set(Calendar.DAY_OF_MONTH, 1);
                            timeSet.add(Calendar.MONTH, 1);
                            timeSet.add(Calendar.DAY_OF_YEAR, -1);
                            tmp.append(",").append(dateFieldName).append("_end:'")
                                    .append(timeSet.getTimeInMillis()).append("'");
                            // set first day of month
                            timeSet.set(Calendar.DAY_OF_MONTH, 1);
                            tmp.append(",").append(dateFieldName).append("_start:'")
                                    .append(timeSet.getTimeInMillis()).append("'");
                        } else if (YEARLY.equalsIgnoreCase(timeResolution)) {
                            // compute last day of year (Dec 31)
                            timeSet.set(Calendar.MONTH, Calendar.DECEMBER);
                            timeSet.set(Calendar.DATE, 31);
                            tmp.append(",").append(dateFieldName).append("_end:'")
                                    .append(timeSet.getTimeInMillis()).append("'");
                            // set first day of year (Jan 1)
                            timeSet.set(Calendar.MONTH, Calendar.JANUARY);
                            timeSet.set(Calendar.DATE, 1);
                            tmp.append(",").append(dateFieldName).append("_start:'")
                                    .append(timeSet.getTimeInMillis()).append("'");
                        }
                    } else {
                        // compute end date for individual data points based on the selected resolution
                        //                     detailsPointEndDate = computeEndDate(tdates[i],timeResolution);
                        // add the date field with start and end dates from the data point
                        tmp.append(",").append(dateFieldName).append("_start:'").append(tdates[i].getTime())
                                .append("'");
                        tmp.append(",").append(dateFieldName).append("_end:'").append(tdates[i].getTime())
                                .append("'");
                    }
                    tmp.append("});");
                    urls[i] = tmp.toString();
                }

                allCounts[aIndex] = counts;
                allColors[aIndex] = colors;
                allAltTexts[aIndex] = altTexts;
                allExpecteds[aIndex] = expecteds;
                allLevels[aIndex] = levels;
                allLineSetURLs[aIndex] = urls;
                allSwitchInfo[aIndex] = switchInfo;
                lineSetLabels[aIndex] = accumIdTranslated;
                displayAlerts[aIndex] = isDetectionDetector;
                aIndex++;

                //remove the accumId for the next series
                dimIds.remove(accumId);
            }

            GraphDataSerializeToDiskHandler hndl = new GraphDataSerializeToDiskHandler(graphDir);
            GraphController gc = getGraphController(null, hndl, userPrincipalName);
            //TODO figure out why I (hodancj1) added this to be accumulation size ~Feb 2012
            // gc.setMaxLegendItems(accumulations.size());

            graphData.setShowSingleAlertLegends(isDetectionDetector);
            graphData.setCounts(allCounts);
            graphData.setColors(allColors);
            graphData.setAltTexts(allAltTexts);
            graphData.setXLabels(dates);
            graphData.setExpecteds(allExpecteds);
            graphData.setLevels(allLevels);
            graphData.setLineSetURLs(allLineSetURLs);
            graphData.setLineSetLabels(lineSetLabels);
            graphData.setDisplayAlerts(displayAlerts);
            // graphData.setDisplaySeverityAlerts(displayAlerts);
            graphData.setPercentBased(percentBased);

            graphData.setXAxisLabel(messageSource.getDataSourceMessage(group.getResolution(), dss));
            graphData.setYAxisLabel(yAxisLabel);

            //allow roughly one labeled tick per 30 pixels of graph width
            int maxLabels = graphData.getGraphWidth() / 30;
            graphData.setMaxLabeledCategoryTicks(Math.min(maxLabels, allCounts[0].length));

            StringBuffer sb = new StringBuffer();
            GraphObject graph = gc.writeTimeSeriesGraph(sb, graphData, true, true, false, graphTimeSeriesUrl);

            result.put("html", sb.toString());

            //added to build method calls from javascript
            Map<String, Object> graphConfig = new HashMap<String, Object>();
            graphConfig.put("address", graphTimeSeriesUrl);
            graphConfig.put("graphDataId", graph.getGraphDataId());
            graphConfig.put("imageMapName", graph.getImageMapName());

            graphConfig.put("graphTitle", graphData.getGraphTitle());
            graphConfig.put("xAxisLabel", graphData.getXAxisLabel());
            graphConfig.put("yAxisLabel", graphData.getYAxisLabel());
            graphConfig.put("xLabels", graphData.getXLabels());
            graphConfig.put("graphWidth", graphData.getGraphWidth());
            graphConfig.put("graphHeight", graphData.getGraphHeight());

            graphConfig.put("yAxisMin", graph.getYAxisMin());
            graphConfig.put("yAxisMax", graph.getYAxisMax());

            // fix invalid JSON coming from GraphController:
            // strip the leading "{" and the trailing "}" so only the series array remains
            String dataSeriesJson = graph.getDataSeriesJSON().replaceFirst("\\{", "")
                    .substring(0, graph.getDataSeriesJSON().length() - 2);

            // read malformed JSON
            ObjectMapper mapper = new ObjectMapper();
            JsonFactory jsonFactory = mapper.getJsonFactory()
                    .configure(Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
                    .configure(Feature.ALLOW_UNQUOTED_CONTROL_CHARS, true);
            JsonParser jsonParser = jsonFactory.createJsonParser(dataSeriesJson);

            // array of String -> Object maps
            TypeReference<Map<String, Object>[]> dataSeriesType = new TypeReference<Map<String, Object>[]>() {
            };

            // write JSON as Map so that it can be serialized properly back to JSON
            Map<String, Object>[] seriesMap = mapper.readValue(jsonParser, dataSeriesType);
            graphConfig.put("dataSeriesJSON", seriesMap);

            if (includeDetails) {
                int totalPoints = 0;
                List<HashMap<String, Object>> details = new ArrayList<HashMap<String, Object>>();
                HashMap<String, Object> detail;
                for (int i = 0; i < allCounts.length; i++) {
                    for (int j = 0; j < allCounts[i].length; j++) {
                        totalPoints++;
                        detail = new HashMap<String, Object>();
                        detail.put("Date", dates[j]);
                        detail.put("Series", lineSetLabels[i]);
                        detail.put("Level", allLevels[i][j]);
                        detail.put("Count", allCounts[i][j]);
                        if (!ArrayUtils.isEmpty(allExpecteds[i])) {
                            detail.put("Expected", allExpecteds[i][j]);
                        }
                        if (!ArrayUtils.isEmpty(allSwitchInfo[i])) {
                            detail.put("Switch", allSwitchInfo[i][j]);
                        }
                        detail.put("Color", allColors[i][j]);
                        details.add(detail);
                    }
                }
                result.put("detailsTotalRows", totalPoints);
                result.put("details", details);
            }
            result.put("graphConfiguration", graphConfig);
            result.put("success", true);
        } else {
            StringBuilder sb = new StringBuilder();
            sb.append("<h2>" + messageSource.getDataSourceMessage("graph.nodataline1", dss) + "</h2>");
            sb.append("<p>" + messageSource.getDataSourceMessage("graph.nodataline2", dss) + "</p>");
            result.put("html", sb.toString());
            result.put("success", true);
        }
    } catch (Exception e) {
        log.error("Failure to create Timeseries", e);
    }
    return result;
}
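
createTimeseries above also shows a compact idiom: a single mutable id collection is reused across series by calling dimIds.add(accumId) before building each drill-down URL and dimIds.remove(accumId) afterwards, instead of copying the base ids for every accumulation. A minimal sketch of that add/use/remove round trip (ids are hypothetical):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;

public class ReuseIdCollectionDemo {
    public static void main(String[] args) {
        // base result dimensions shared by every series
        Collection<String> dimIds = new ArrayList<String>(Arrays.asList("date", "region"));

        for (String accumId : Arrays.asList("cases", "visits")) {
            dimIds.add(accumId); // temporarily include this series' accumulation
            System.out.println("ids for " + accumId + ": " + dimIds);
            dimIds.remove(accumId); // restore the base set for the next series
        }
    }
}

The round trip is exact because Collection.remove deletes a single instance: as long as accumId is not already among the base ids, the remove undoes precisely the preceding add.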

From source file:org.apache.mahout.freqtermsets.fpgrowth.FPGrowth.java

/**
 * Internal top-k frequent pattern generation algorithm, which represents the
 * attributes (the generic type A) as integers and transforms features to use
 * only integers.
 * 
 * @param cTree
 *          compressed transaction tree replacing the plain transaction iterator
 * @param attributeIdMapping
 *          mapping from attributes of type A to their integer ids
 * @param attributeFrequency
 *          array representing the frequency of the corresponding
 *          attribute id
 * @param minSupport
 *          minimum support of the pattern to be mined
 * @param k
 *          maximum size of the max-heap in which patterns are
 *          held
 * @param featureSetSize
 *          number of features
 * @param returnFeatures
 *          the ids of the features for which top-k patterns have to be
 *          mined
 * @param topKPatternsOutputCollector
 *          the output collector which transforms the given pattern in
 *          integer format to the corresponding A format
 * @param updater
 *          callback used to report progress
 */
private void generateTopKFrequentPatterns(
        // Iterator<Pair<int[], Long>> transactions,
        TransactionTree cTree, OpenObjectIntHashMap<A> attributeIdMapping, long[] attributeFrequency,
        long minSupport, int k, int featureSetSize, Collection<Integer> returnFeatures,
        TopKPatternsOutputConverter<A> topKPatternsOutputCollector, StatusUpdater updater) throws IOException {
    // YA: BONSAAAAAAAII {
    // FPTree tree = new FPTree(featureSetSize);
    FPTree tree = null;
    boolean change = true;
    int pruneIters = 0;
    IntArrayList pruneByContingencyCount = new IntArrayList();
    IntArrayList pruneBySpreadCount = new IntArrayList();

    while (change) {
        pruneByContingencyCount.add(0);
        pruneBySpreadCount.add(0);

        change = false;
        tree = new FPTree(featureSetSize);
        // initialized here so the variables are definitely assigned even when BONSAI_PRUNE is off
        OpenIntLongHashMap[] childJointFreq = null;
        long[] sumChildSupport = null;
        if (BONSAI_PRUNE) {
            childJointFreq = new OpenIntLongHashMap[featureSetSize];
            sumChildSupport = new long[featureSetSize];
        }
        double supportGrandTotal = 0;
        // } YA: BONSAAAAAAAII

        for (int i = 0; i < featureSetSize; i++) {
            tree.addHeaderCount(i, attributeFrequency[i]);

            // YA: BONSAAAAAAAII {
            if (attributeFrequency[i] < 0) {
                continue; // this is an attribute not satisfying the
                // monotone constraint
            }
            if (BONSAI_PRUNE) {
                childJointFreq[i] = new OpenIntLongHashMap();
                supportGrandTotal += attributeFrequency[i];
            }
            // } YA: Bonsai
        }

        // Constructing initial FPTree from the list of transactions
        // YA Bonsai : To pass the tree itself the iterator now would work
        // only with ints.. the A type argument is
        // not checked in the constructor. TOD: remove the type argument and
        // force using ints only
        Iterator<Pair<int[], Long>> transactions = new IntTransactionIterator(cTree.iterator(),
                attributeIdMapping);

        int nodecount = 0;
        // int attribcount = 0;
        int i = 0;
        while (transactions.hasNext()) {
            Pair<int[], Long> transaction = transactions.next();
            Arrays.sort(transaction.getFirst());
            // attribcount += transaction.length;
            // YA: Bonsai {
            // nodecount += treeAddCount(tree, transaction.getFirst(),
            // transaction.getSecond(), minSupport, attributeFrequency);
            int temp = FPTree.ROOTNODEID;
            boolean addCountMode = true;
            for (int attribute : transaction.getFirst()) {
                if (attributeFrequency[attribute] < 0) {
                    continue; // this is an attribute not satisfying the
                    // monotone constraint
                }
                if (attributeFrequency[attribute] < minSupport) {
                    break;
                }
                if (BONSAI_PRUNE && tree.attribute(temp) != -1) { // Root node
                    childJointFreq[tree.attribute(temp)].put(attribute,
                            childJointFreq[tree.attribute(temp)].get(attribute) + transaction.getSecond());
                    sumChildSupport[tree.attribute(temp)] += transaction.getSecond();
                }
                int child;
                if (addCountMode) {
                    child = tree.childWithAttribute(temp, attribute);
                    if (child == -1) {
                        addCountMode = false;
                    } else {
                        tree.addCount(child, transaction.getSecond());
                        temp = child;
                    }
                }
                if (!addCountMode) {
                    child = tree.createNode(temp, attribute, transaction.getSecond());
                    temp = child;
                    nodecount++;
                }
            }
            // } YA Bonsai
            i++;
            if (i % 10000 == 0) {
                log.info("FPTree Building: Read {} Transactions", i);
            }
        }

        log.info("Number of Nodes in the FP Tree: {}", nodecount);

        // YA: BONSAAAAAAAII {
        if (BONSAI_PRUNE) {
            if (log.isTraceEnabled()) {
                log.trace("Bonsai pruning tree: {}", tree);
            }

            for (int a = 0; a < tree.getHeaderTableCount(); ++a) {
                int attr = tree.getAttributeAtIndex(a);

                if (attributeFrequency[attr] < 0) {
                    continue; // this is an attribute not satisfying the
                    // monotone constraint
                }
                if (attributeFrequency[attr] < minSupport) {
                    break;
                }
                // if (sumChildSupport[attr] < attributeFrequency[attr]) {
                // // the case of . (full stop) as the next child
                // childJointFreq[attr]
                // .put(-1,
                // (long) (attributeFrequency[attr] - sumChildSupport[attr]));
                // }
                float numChildren = childJointFreq[attr].size();

                // if (numChildren < LEAST_NUM_CHILDREN_TO_VOTE_FOR_NOISE) {
                // continue;
                // }
                if (log.isTraceEnabled()) {
                    log.trace("Voting for noisiness of attribute {} with number of children: {}", attr,
                            numChildren);
                    log.trace("Attribute support: {} - Total Children support: {}", attributeFrequency[attr],
                            sumChildSupport[attr]);
                }
                // Earlier experiments, left commented out in the original source,
                // scored child-distribution noisiness three other ways: an
                // EMD-style weighted difference from the uniform child frequency,
                // per-child log-odds votes against random child selection, and
                // KL divergence from the uniform distribution. All were abandoned
                // in favor of the pair-wise metric below.
                // //////////////////////////////////////
                // Pair wise metric with different children
                SummaryStatistics metricSummary = new SummaryStatistics();
                // double[] metric = new double[(int) numChildren];

                // SummaryStatistics spreadSummary = new SummaryStatistics();
                // double uniformSpread = attributeFrequency[attr] /
                // numChildren;
                double goodnessOfFit = 0.0;
                // If I don't take the . into account: sumChildSupport[attr] /
                // numChildren;

                double sumOfWeights = 0;
                IntArrayList childAttrArr = childJointFreq[attr].keys();
                for (int c = 0; c < childAttrArr.size(); ++c) {
                    int childAttr = childAttrArr.get(c);
                    double[][] contingencyTable = new double[2][2];
                    if (childAttr == -1) {
                        // this is meaningless, as yuleq will just be 1
                        contingencyTable[1][1] = childJointFreq[attr].get(childAttr);
                        contingencyTable[1][0] = sumChildSupport[attr];
                        // equals attributeFrequency[attr] -
                        // contingencyTable[1][1];
                        contingencyTable[0][1] = 0;
                        contingencyTable[0][0] = supportGrandTotal - attributeFrequency[attr];
                    } else {
                        contingencyTable[1][1] = childJointFreq[attr].get(childAttr);
                        contingencyTable[1][0] = attributeFrequency[attr] - contingencyTable[1][1];
                        contingencyTable[0][1] = attributeFrequency[childAttr] - contingencyTable[1][1];
                        contingencyTable[0][0] = supportGrandTotal - attributeFrequency[attr]
                                - attributeFrequency[childAttr] + contingencyTable[1][1];
                        // yuleq is computed only in this branch because it is meaningless for the "." child above
                        double ad = contingencyTable[0][0] * contingencyTable[1][1];
                        double bc = contingencyTable[0][1] * contingencyTable[1][0];
                        double yuleq = (ad - bc) / (ad + bc);
                        double weight = attributeFrequency[childAttr];
                        sumOfWeights += weight;
                        metricSummary.addValue(Math.abs(yuleq * weight));
                        // metricSummary.addValue(yuleq * yuleq * weight);
                    }
                    // spreadSummary.addValue(Math.abs(uniformSpread
                    // - contingencyTable[1][1])
                    // / numChildren);
                    // spreadSummary.addValue(contingencyTable[1][1]); // *
                    // weight
                    goodnessOfFit += contingencyTable[1][1] * contingencyTable[1][1];
                }
                // double weightedquadraticMean =
                // Math.sqrt(metricSummary.getSum() / sumOfWeights);
                double weightedMean = (metricSummary.getSum() / sumOfWeights);

                boolean noise = false;
                // if (weightedMean < 0.5) {
                // pruneByContingencyCount.set(pruneIters, pruneByContingencyCount.get(pruneIters) + 1);
                // noise = true;
                // } else if (weightedMean < 0.95) {
                if (numChildren > 1) {
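                    // chi-squared goodness of fit against a uniform child distribution:
                    // sum(O^2)/E - n == sum((O-E)^2/E) with E = n / numChildren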
                    double n = sumChildSupport[attr]; // attributeFrequency[attr];
                    goodnessOfFit /= (n / numChildren);
                    goodnessOfFit -= n;
                    ChiSquaredDistributionImpl chisqDist = new ChiSquaredDistributionImpl(numChildren - 1);
                    double criticalPoint = -1;
                    try {
                        criticalPoint = chisqDist.inverseCumulativeProbability(1.0 - SIGNIFICANCE / 2.0);
                    } catch (MathException e) {
                        log.error(e.getMessage(), e);
                    }
                    if (goodnessOfFit < criticalPoint) {
                        pruneBySpreadCount.set(pruneIters, pruneBySpreadCount.get(pruneIters) + 1);
                        noise = true;
                    }
                    // A commented-out alternative in the original source instead
                    // compared the central tendency of the child-count spread
                    // (minus a confidence-interval half-width) against the
                    // uniform-spread threshold.
                }
                change |= noise;

                if (noise) {
                    if (log.isTraceEnabled()) {
                        log.trace("Pruning attribute {} with child joint freq {}", attr, childJointFreq[attr]);
                    }
                    // attr is autoboxed to Integer, so Collection.remove deletes the value itself
                    returnFeatures.remove(attr);
                    attributeFrequency[attr] = -1;
                }
            }
        }
        ++pruneIters;
    }
    if (log.isTraceEnabled()) {
        log.trace("Pruned tree: {}", tree);
        log.trace("Prune by contingency: {} - Prune by spread: {}", pruneByContingencyCount,
                pruneBySpreadCount);
    }
    // } YA: Bonsai
    fpGrowth(tree, minSupport, k, returnFeatures, topKPatternsOutputCollector, updater);
}
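
One detail in the pruning step deserves a note: returnFeatures is a Collection<Integer> and attr is an int, so returnFeatures.remove(attr) autoboxes the argument and removes the matching value. If the reference were typed List<Integer>, the overload List.remove(int index) would be chosen instead and would silently remove by position. A minimal sketch of the difference (values are hypothetical):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

public class RemoveBoxingDemo {
    public static void main(String[] args) {
        int attr = 2;

        // through a Collection reference only remove(Object) exists, so attr is boxed
        Collection<Integer> features = new ArrayList<Integer>(Arrays.asList(5, 2, 9));
        features.remove(attr); // removes the VALUE 2
        System.out.println(features); // prints [5, 9]

        // through a List reference, remove(int index) wins overload resolution
        List<Integer> featureList = new ArrayList<Integer>(Arrays.asList(5, 2, 9));
        featureList.remove(attr); // removes the element at INDEX 2
        System.out.println(featureList); // prints [5, 2]

        // to remove by value through a List reference, box explicitly
        featureList.remove(Integer.valueOf(2));
        System.out.println(featureList); // prints [5]
    }
}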