Example usage for com.google.common.collect Multimap remove

Introduction

This page collects example usages of the com.google.common.collect Multimap remove method.

Prototype

boolean remove(@Nullable Object key, @Nullable Object value);

Document

Removes a single key-value pair matching the given key and value from this multimap, if such a pair exists.
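
Before the full examples, a minimal sketch of the contract (the class and key names here are illustrative, not taken from the examples below): remove(key, value) deletes at most one matching pair and returns whether such a pair existed.

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class MultimapRemoveDemo {
    public static void main(String[] args) {
        Multimap<String, String> multimap = ArrayListMultimap.create();
        multimap.put("fruit", "apple");
        multimap.put("fruit", "apple"); // a ListMultimap may hold duplicate pairs
        multimap.put("fruit", "pear");

        System.out.println(multimap.remove("fruit", "apple")); // true, exactly one pair removed
        System.out.println(multimap.get("fruit"));             // [apple, pear]
        System.out.println(multimap.remove("fruit", "plum"));  // false, no such pair
    }
}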

Usage

From source file: com.alibaba.otter.node.etl.transform.transformer.RowDataTransformer.java

private EventColumn translateColumn(EventData data, EventColumn scolumn, TableInfoHolder tableHolder,
        DataMediaPair dataMediaPair, Multimap<String, String> translateColumnNames) {
    EventType type = data.getEventType();
    EventColumn tcolumn = new EventColumn();
    tcolumn.setNull(scolumn.getColumnValue() == null);
    tcolumn.setKey(scolumn.isKey());
    tcolumn.setIndex(scolumn.getIndex());
    tcolumn.setUpdate(scolumn.isUpdate());

    String columnName = translateColumnName(scolumn.getColumnName(), dataMediaPair, translateColumnNames);
    if (StringUtils.isBlank(columnName)) {
        throw new TransformException("can't translate column name:" + scolumn.getColumnName() + "in pair:"
                + dataMediaPair.toString());
    }

    // columnName = StringUtils.remove(columnName, "`");
    tcolumn.setColumnName(columnName);
    tcolumn.setColumnType(scolumn.getColumnType());
    if (tableHolder != null) {
        // modify by ljh at 2013-01-23
        // allow a missing target column to be tolerated when compatibility mode
        // is enabled and the source column carries no data (checked per event type below)
        boolean canColumnsNotExist = tableHolder.isEnableCompatibleMissColumn();
        if (type == EventType.UPDATE) {
            // updates: only if the column was not updated and is null
            canColumnsNotExist &= !scolumn.isUpdate() && scolumn.isNull();
        } else if (type == EventType.INSERT) {
            // inserts: only if the column is null
            canColumnsNotExist &= scolumn.isNull();
        } else if (type == EventType.DELETE) {
            canColumnsNotExist &= !scolumn.isKey(); // deletes: only if the column is not a key
        }

        Column matchDbColumn = getMatchColumn(tableHolder.getTable().getColumns(), tcolumn.getColumnName());
        // the column may be missing because of a DDL change; reload the table meta
        if (matchDbColumn == null) {
            DbMediaSource dbMediaSource = (DbMediaSource) dataMediaPair.getTarget().getSource();
            DbDialect dbDialect = dbDialectFactory.getDbDialect(dataMediaPair.getPipelineId(), dbMediaSource);
            String schemaName = tableHolder.getTable().getSchema();
            if (StringUtils.isEmpty(schemaName)) {
                schemaName = tableHolder.getTable().getCatalog();
            }
            Table table = dbDialect.findTable(schemaName, tableHolder.getTable().getName(), false); // bypass the table cache

            tableHolder.setTable(table);
            matchDbColumn = getMatchColumn(tableHolder.getTable().getColumns(), tcolumn.getColumnName());
            if (matchDbColumn == null) {
                if (canColumnsNotExist) {
                    return null;
                } else {
                    throw new TransformException(scolumn.getColumnName() + " is not found in "
                            + table.toString() + " and source : " + dataMediaPair.getTarget().getNamespace()
                            + "." + dataMediaPair.getTarget().getName());
                }
            }
        }

        if (tableHolder.isUseTableTransform()) {
            int sqlType = matchDbColumn.getTypeCode();
            tcolumn.setColumnType(sqlType);
        }
    }

    // if (dataMediaPair.getTarget().getSource().getType().isOracle()) {
    // // re-encode the value for Oracle targets
    // String encodeValue = SqlUtils.encoding(scolumn.getColumnValue(),
    // scolumn.getColumnType(),
    // dataMediaPair.getSource().getSource().getEncode(),
    // dataMediaPair.getTarget().getSource().getEncode());
    // tcolumn.setColumnValue(encodeValue);
    // } else {
    // MySQL targets take the value as-is
    tcolumn.setColumnValue(scolumn.getColumnValue());
    // }
    translateColumnNames.remove(scolumn.getColumnName(), columnName); // retire the translation once it has been applied
    return tcolumn;
}
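
In the snippet above, remove(key, value) retires a name translation once it has been applied. A Guava multimap never retains a key whose value collection has become empty, so removing the last pair also drops the key. A minimal sketch with hypothetical column names:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class TranslateOnceDemo {
    public static void main(String[] args) {
        Multimap<String, String> translateColumnNames = ArrayListMultimap.create();
        translateColumnNames.put("src_col", "dst_col"); // hypothetical mapping

        // retire the mapping once the column has been translated
        translateColumnNames.remove("src_col", "dst_col");

        // removing the last value for a key removes the key as well
        System.out.println(translateColumnNames.containsKey("src_col")); // false
    }
}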

From source file: moa2014.MOAH52014.java

public static int principal(int[][] matrizatual) {
    long startTime = System.currentTimeMillis();
    Multimap<Integer, String> open_list = TreeMultimap.create();
    HashMap<String, Estado> processados = new HashMap<>();

    int h1x = MOAH12014.diferencaMatriz(matrizatual);
    int h2x = MOAH22014.diferencaMatriz(matrizatual);
    int h3x = MOAH32014.diferencaMatriz(matrizatual);

    int difmatrizatual = maior(h1x, h2x, h3x);

    String stringmatriz = transformaMatrizString(matrizatual);
    open_list.put(difmatrizatual, stringmatriz);
    Estado estadoatual = new Estado(matrizatual, 0);
    processados.put(stringmatriz, estadoatual);

    int arvoresgeradas = 0;
    int arvoresprocessadas = 0;

    while (!open_list.isEmpty()) {

        Iterator<Integer> iterator = open_list.keySet().iterator();

        Integer key = iterator.next();
        String matrizatualx1 = open_list.asMap().get(key).iterator().next();
        Estado estadomenor = processados.get(matrizatualx1);
        int altura = estadomenor.getCusto();

        // locate the zero (empty) tile
        int[] zerot = localizazero(estadomenor.getMatriz());
        int x = zerot[0];
        int y = zerot[1];
        int x0 = x - 1;
        int x1 = x + 1;
        int y0 = y - 1;
        int y1 = y + 1;
        int difmatrizatualx = MOAH32014.diferencaMatriz(estadomenor.getMatriz());
        if (difmatrizatualx == 0) {
            long endTime = System.currentTimeMillis();
            System.out.println("---------------------------------------");
            System.out.println("Arvores Geradas: " + arvoresgeradas);
            System.out.println("Arvores Processadas: " + arvoresprocessadas);
            System.out.println("Quantidade de Movimentos: " + estadomenor.getCusto());
            System.out.println("Tempo de processamento " + (endTime - startTime) + " ms");
            System.out.println("---------------------------------------\n\n");
            return 0;
        }
        int[][] matrizatualx = estadomenor.getMatriz();
        arvoresprocessadas++;
        if (x0 >= 0) {

            int[][] matriz;
            matriz = copyarray(matrizatualx);
            matriz[x][y] = matrizatualx[x0][y];
            matriz[x0][y] = matrizatualx[x][y];

            String stringmatriz1 = transformaMatrizString(matriz);
            if (!(processados.containsKey(stringmatriz1))) {
                arvoresgeradas++;
                h1x = MOAH12014.diferencaMatriz(matriz);
                h2x = MOAH22014.diferencaMatriz(matriz);
                h3x = MOAH32014.diferencaMatriz(matriz);
                int diferencamatriz = maior(h1x, h2x, h3x);
                int custototal = diferencamatriz + altura + 1;

                Estado estadonovo = new Estado(matriz, altura + 1);
                open_list.put(custototal, stringmatriz1);

                processados.put(stringmatriz1, estadonovo);

            }
        }
        if (x1 <= 3) {
            int[][] matriz;
            matriz = copyarray(matrizatualx);
            matriz[x][y] = matrizatualx[x1][y];
            matriz[x1][y] = matrizatualx[x][y];
            String stringmatriz2 = transformaMatrizString(matriz);

            if (!(processados.containsKey(stringmatriz2))) {
                arvoresgeradas++;
                h1x = MOAH12014.diferencaMatriz(matriz);
                h2x = MOAH22014.diferencaMatriz(matriz);
                h3x = MOAH32014.diferencaMatriz(matriz);
                int diferencamatriz = maior(h1x, h2x, h3x);
                int custototal = diferencamatriz + altura + 1;

                Estado estadonovo = new Estado(matriz, altura + 1);
                open_list.put(custototal, stringmatriz2);

                processados.put(stringmatriz2, estadonovo);

            }
        }
        if (y0 >= 0) {
            int[][] matriz;
            matriz = copyarray(matrizatualx);
            matriz[x][y] = matrizatualx[x][y0];
            matriz[x][y0] = matrizatualx[x][y];
            String stringmatriz3 = transformaMatrizString(matriz);

            if (!(processados.containsKey(stringmatriz3))) {
                arvoresgeradas++;
                h1x = MOAH12014.diferencaMatriz(matriz);
                h2x = MOAH22014.diferencaMatriz(matriz);
                h3x = MOAH32014.diferencaMatriz(matriz);
                int diferencamatriz = maior(h1x, h2x, h3x);
                int custototal = diferencamatriz + altura + 1;

                Estado estadonovo = new Estado(matriz, altura + 1);
                open_list.put(custototal, stringmatriz3);

                processados.put(stringmatriz3, estadonovo);

            }
        }
        if (y1 <= 3) {
            int[][] matriz;
            matriz = copyarray(matrizatualx);
            matriz[x][y] = matrizatualx[x][y1];
            matriz[x][y1] = matrizatualx[x][y];

            String stringmatriz4 = transformaMatrizString(matriz);

            if (!(processados.containsKey(stringmatriz4))) {
                arvoresgeradas++;
                h1x = MOAH12014.diferencaMatriz(matriz);
                h2x = MOAH22014.diferencaMatriz(matriz);
                h3x = MOAH32014.diferencaMatriz(matriz);
                int diferencamatriz = maior(h1x, h2x, h3x);
                int custototal = diferencamatriz + altura + 1;

                Estado estadonovo = new Estado(matriz, altura + 1);
                open_list.put(custototal, stringmatriz4);

                processados.put(stringmatriz4, estadonovo);

            }
        }
        open_list.remove(key, matrizatualx1);
    }
    return 0;

}
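
This example uses a TreeMultimap as a sorted open list for a 15-puzzle search: keys are costs, values are encoded states, the smallest key is expanded first, and remove(key, value) pops exactly the entry that was just processed. A minimal sketch of that pattern (costs and state strings are placeholders):

import com.google.common.collect.TreeMultimap;

public class OpenListDemo {
    public static void main(String[] args) {
        TreeMultimap<Integer, String> openList = TreeMultimap.create();
        openList.put(7, "stateA"); // placeholder states
        openList.put(3, "stateB");
        openList.put(3, "stateC");

        while (!openList.isEmpty()) {
            // TreeMultimap keeps keys sorted, so the first key is the cheapest
            Integer cost = openList.keySet().iterator().next();
            String state = openList.get(cost).iterator().next();
            System.out.println(cost + " -> " + state);
            // pop this entry only; other entries with the same cost stay queued
            openList.remove(cost, state);
        }
    }
}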

From source file: org.commoncrawl.util.NodeAffinityMaskBuilder.java

public static String buildNodeAffinityMask(FileSystem fileSystem, Path partFileDirectory,
        Map<Integer, String> optionalRootMapHint, Set<String> excludedNodeList, int maxReducersPerNode,
        boolean skipBalance) throws IOException {

    TreeMap<Integer, String> partitionToNodeMap = new TreeMap<Integer, String>();
    FileStatus paths[] = fileSystem.globStatus(new Path(partFileDirectory, "part-*"));

    if (paths.length == 0) {
        throw new IOException("Invalid source Path:" + partFileDirectory);
    }

    Multimap<String, Integer> inverseMap = TreeMultimap.create();
    Map<Integer, List<String>> paritionToDesiredCandidateList = new TreeMap<Integer, List<String>>();

    // iterate paths 
    for (FileStatus path : paths) {

        String currentFile = path.getPath().getName();
        int partitionNumber;
        try {
            if (currentFile.startsWith("part-r")) {
                partitionNumber = NUMBER_FORMAT.parse(currentFile.substring("part-r-".length())).intValue();
            } else {
                partitionNumber = NUMBER_FORMAT.parse(currentFile.substring("part-".length())).intValue();
            }
        } catch (ParseException e) {
            throw new IOException("Invalid Part Name Encountered:" + currentFile);
        }

        // get block locations 
        BlockLocation locations[] = fileSystem.getFileBlockLocations(path, 0, path.getLen());

        // if passed in root map is not null, then validate that all blocks for the current file reside on the desired node 
        if (optionalRootMapHint != null) {
            // the host all blocks should reside on 
            String desiredHost = optionalRootMapHint.get(partitionNumber);

            ArrayList<String> misplacedBlocks = new ArrayList<String>();
            // ok walk all blocks 
            for (BlockLocation location : locations) {
                boolean found = false;
                for (String host : location.getHosts()) {
                    if (host.compareTo(desiredHost) == 0) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    misplacedBlocks.add("Block At:" + location.getOffset() + " for File:" + path.getPath()
                            + " did not contain desired location:" + desiredHost);
                }

            }
            // ok pass test at a certain threshold 
            if (misplacedBlocks.size() != 0
                    && ((float) misplacedBlocks.size() / (float) locations.length) > .50f) {
                LOG.error("Misplaced Blocks Exceed Threshold");
                for (String misplacedBlock : misplacedBlocks) {
                    LOG.error(misplacedBlock);
                }
                // TODO: SKIP THIS STEP FOR NOW ??? 
                //throw new IOException("Misplaced Blocks Exceed Threshold!");
            }
            partitionToNodeMap.put(partitionNumber, desiredHost);
        } else {
            if (excludedNodeList != null) {
                // LOG.info("Exclued Node List is:" + Lists.newArrayList(excludedNodeList).toString());
            }
            // ok ask file system for block locations
            TreeMap<String, Integer> nodeToBlockCount = new TreeMap<String, Integer>();

            for (BlockLocation location : locations) {
                for (String host : location.getHosts()) {
                    if (excludedNodeList == null || !excludedNodeList.contains(host)) {
                        Integer nodeHitCount = nodeToBlockCount.get(host);
                        if (nodeHitCount == null) {
                            nodeToBlockCount.put(host, 1);
                        } else {
                            nodeToBlockCount.put(host, nodeHitCount.intValue() + 1);
                        }
                    }
                }
            }

            if (nodeToBlockCount.size() == 0) {
                throw new IOException("No valid nodes found for partition number:" + path);
            }

            Map.Entry<String, Integer> entries[] = nodeToBlockCount.entrySet().toArray(new Map.Entry[0]);
            Arrays.sort(entries, new Comparator<Map.Entry<String, Integer>>() {

                @Override
                public int compare(Entry<String, Integer> o1, Entry<String, Integer> o2) {
                    return o1.getValue().intValue() < o2.getValue().intValue() ? 1
                            : o1.getValue().intValue() == o2.getValue().intValue() ? 0 : -1;
                }
            });

            // build a list of nodes by priority ... 
            List<String> nodesByPriority = Lists.transform(Lists.newArrayList(entries),
                    new Function<Map.Entry<String, Integer>, String>() {

                        @Override
                        public String apply(Entry<String, Integer> entry) {
                            return entry.getKey();
                        }
                    });

            // stash it away ... 
            paritionToDesiredCandidateList.put(partitionNumber, nodesByPriority);
            //LOG.info("Mapping Partition:" + partitionNumber + " To Node:" + entries[0].getKey() + " BlockCount" + entries[0].getValue().intValue());
            partitionToNodeMap.put(partitionNumber, entries[0].getKey());
            // store the inverse mapping ... 
            inverseMap.put(entries[0].getKey(), partitionNumber);
        }
    }

    if (skipBalance) {
        // walk partition map to make sure everything is assigned ...
        /*
        for (String node : inverseMap.keys()) { 
          if (inverseMap.get(node).size() > maxReducersPerNode) { 
            throw new IOException("Node:" + node + " has too many partitions! ("+inverseMap.get(node).size());
          }
        }
        */
    }

    // now if optional root map hint is null 
    if (optionalRootMapHint == null && !skipBalance) {
        // figure out if there is an imbalance
        int avgRegionsPerNode = (int) Math.floor((float) paths.length / (float) inverseMap.keySet().size());
        int maxRegionsPerNode = (int) Math.ceil((float) paths.length / (float) inverseMap.keySet().size());
        LOG.info("Attempting to ideally balance nodes. Avg paritions per node:" + avgRegionsPerNode);

        // two passes .. 
        for (int pass = 0; pass < 2; ++pass) {
            LOG.info("Pass:" + pass);
            // iterate nodes ... 
            for (String node : ImmutableSet.copyOf(inverseMap.keySet())) {
                // get partitions in map
                Collection<Integer> paritions = ImmutableList.copyOf(inverseMap.get(node));
                // if partition count exceeds the desired average ...
                if (paritions.size() > maxRegionsPerNode) {
                    // first pass, assign based on preference
                    if (pass == 0) {
                        LOG.info("Node:" + node + " partition count:" + paritions.size() + " exceeds avg:"
                                + avgRegionsPerNode);
                        // walk partitions trying to find a node to discard the partition to
                        for (int partition : paritions) {
                            for (String candidate : paritionToDesiredCandidateList.get(partition)) {
                                if (!candidate.equals(node)) {
                                    // see if this candidate has room ..
                                    if (inverseMap.get(candidate).size() < avgRegionsPerNode) {
                                        LOG.info("REASSIGNING parition:" + partition + " from Node:" + node
                                                + " to Node:" + candidate);
                                        // found match reassign it ... 
                                        inverseMap.remove(node, partition);
                                        inverseMap.put(candidate, partition);
                                        break;
                                    }
                                }
                            }
                            // break out once we reach our desired number of partitions for this node
                            if (inverseMap.get(node).size() == avgRegionsPerNode)
                                break;
                        }
                    }
                    // second pass ... assign based on least loaded node ... 
                    else {
                        int desiredRelocations = paritions.size() - maxRegionsPerNode;
                        LOG.info("Desired Relocation for node:" + node + ":" + desiredRelocations
                                + " partitions:" + paritions.size());
                        for (int i = 0; i < desiredRelocations; ++i) {
                            String leastLoadedNode = null;
                            int leastLoadedNodePartitionCount = 0;

                            for (String candidateNode : inverseMap.keySet()) {
                                if (leastLoadedNode == null || inverseMap.get(candidateNode)
                                        .size() < leastLoadedNodePartitionCount) {
                                    leastLoadedNode = candidateNode;
                                    leastLoadedNodePartitionCount = inverseMap.get(candidateNode).size();
                                }
                            }
                            int bestPartition = -1;
                            int bestParitionOffset = -1;

                            for (int candidateParition : inverseMap.get(node)) {
                                int offset = 0;
                                for (String nodeCandidate : paritionToDesiredCandidateList
                                        .get(candidateParition)) {
                                    if (nodeCandidate.equals(leastLoadedNode)) {
                                        if (bestPartition == -1 || bestParitionOffset > offset) {
                                            bestPartition = candidateParition;
                                            bestParitionOffset = offset;
                                        }
                                        break;
                                    }
                                    offset++;
                                }
                            }
                            if (bestPartition == -1) {
                                bestPartition = Iterables.get(inverseMap.get(node), 0);
                            }
                            LOG.info("REASSIGNING parition:" + bestPartition + " from Node:" + node
                                    + " to Node:" + leastLoadedNode);
                            // found match reassign it ... 
                            inverseMap.remove(node, bestPartition);
                            inverseMap.put(leastLoadedNode, bestPartition);
                        }
                    }
                }
            }
        }
        LOG.info("Rebuilding parition to node map based on ideal balance");
        for (String node : inverseMap.keySet()) {
            LOG.info("Node:" + node + " has:" + inverseMap.get(node).size() + " partitions:"
                    + inverseMap.get(node).toString());
        }

        partitionToNodeMap.clear();
        for (Map.Entry<String, Integer> entry : inverseMap.entries()) {
            partitionToNodeMap.put(entry.getValue(), entry.getKey());
        }
    }

    StringBuilder builder = new StringBuilder();
    int itemCount = 0;
    for (Map.Entry<Integer, String> entry : partitionToNodeMap.entrySet()) {
        if (itemCount++ != 0)
            builder.append("\t");
        builder.append(entry.getKey().intValue() + "," + entry.getValue());
    }

    return builder.toString();
}
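
The rebalancing passes above hinge on a remove(key, value)/put pair: a partition is detached from one node's entry list and attached to another's. A minimal sketch of that reassignment step (node names and partition numbers are made up):

import com.google.common.collect.Multimap;
import com.google.common.collect.TreeMultimap;

public class ReassignDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> inverseMap = TreeMultimap.create();
        inverseMap.put("node-a", 0); // hypothetical assignments
        inverseMap.put("node-a", 1);
        inverseMap.put("node-b", 2);

        // move partition 1 from node-a to node-b
        if (inverseMap.remove("node-a", 1)) {
            inverseMap.put("node-b", 1);
        }
        System.out.println(inverseMap); // {node-a=[0], node-b=[1, 2]}
    }
}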

From source file: org.apache.hadoop.hive.ql.optimizer.SharedWorkOptimizer.java

@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {

    final Map<String, TableScanOperator> topOps = pctx.getTopOps();
    if (topOps.size() < 2) {
        // Nothing to do, bail out
        return pctx;
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("Before SharedWorkOptimizer:\n" + Operator.toString(pctx.getTopOps().values()));
    }

    // Cache to use during optimization
    SharedWorkOptimizerCache optimizerCache = new SharedWorkOptimizerCache();

    // Gather information about the DPP table scans and store it in the cache
    gatherDPPTableScanOps(pctx, optimizerCache);

    // Map of dbName.TblName -> TSOperator
    Multimap<String, TableScanOperator> tableNameToOps = splitTableScanOpsByTable(pctx);

    // We enforce a certain order when reusing work.
    // In particular, we rank the tables by size of table x number of reads.
    List<Entry<String, Long>> sortedTables = rankTablesByAccumulatedSize(pctx);
    LOG.debug("Sorted tables by size: {}", sortedTables);

    // Execute optimization
    Multimap<String, TableScanOperator> existingOps = ArrayListMultimap.create();
    Set<Operator<?>> removedOps = new HashSet<>();
    for (Entry<String, Long> tablePair : sortedTables) {
        String tableName = tablePair.getKey();
        for (TableScanOperator discardableTsOp : tableNameToOps.get(tableName)) {
            if (removedOps.contains(discardableTsOp)) {
                LOG.debug("Skip {} as it has been already removed", discardableTsOp);
                continue;
            }
            Collection<TableScanOperator> prevTsOps = existingOps.get(tableName);
            for (TableScanOperator retainableTsOp : prevTsOps) {
                if (removedOps.contains(retainableTsOp)) {
                    LOG.debug("Skip {} as it has been already removed", retainableTsOp);
                    continue;
                }

                // First we quickly check if the two table scan operators can actually be merged
                boolean mergeable = areMergeable(pctx, optimizerCache, retainableTsOp, discardableTsOp);
                if (!mergeable) {
                    // Skip
                    LOG.debug("{} and {} cannot be merged", retainableTsOp, discardableTsOp);
                    continue;
                }

                // Secondly, we extract information about the part of the tree that can be merged
                // as well as some structural information (memory consumption) that needs to be
                // used to determine whether the merge can happen
                SharedResult sr = extractSharedOptimizationInfo(pctx, optimizerCache, retainableTsOp,
                        discardableTsOp);

                // It seems these two operators can be merged.
                // Check that plan meets some preconditions before doing it.
                // In particular, in the presence of map joins in the upstream plan:
                // - we cannot exceed the noconditional task size, and
                // - if we already merged the big table, we cannot merge the broadcast
                // tables.
                if (!validPreConditions(pctx, optimizerCache, sr)) {
                    // Skip
                    LOG.debug("{} and {} do not meet preconditions", retainableTsOp, discardableTsOp);
                    continue;
                }

                // We can merge
                if (sr.retainableOps.size() > 1) {
                    // More than just the TS operator
                    Operator<?> lastRetainableOp = sr.retainableOps.get(sr.retainableOps.size() - 1);
                    Operator<?> lastDiscardableOp = sr.discardableOps.get(sr.discardableOps.size() - 1);
                    if (lastDiscardableOp.getNumChild() != 0) {
                        List<Operator<? extends OperatorDesc>> allChildren = Lists
                                .newArrayList(lastDiscardableOp.getChildOperators());
                        for (Operator<? extends OperatorDesc> op : allChildren) {
                            lastDiscardableOp.getChildOperators().remove(op);
                            op.replaceParent(lastDiscardableOp, lastRetainableOp);
                            lastRetainableOp.getChildOperators().add(op);
                        }
                    }

                    LOG.debug("Merging subtree starting at {} into subtree starting at {}", discardableTsOp,
                            retainableTsOp);
                } else {
                    // Only TS operator
                    ExprNodeGenericFuncDesc exprNode = null;
                    if (retainableTsOp.getConf().getFilterExpr() != null) {
                        // Push filter on top of children
                        pushFilterToTopOfTableScan(optimizerCache, retainableTsOp);
                        // Clone to push to table scan
                        exprNode = (ExprNodeGenericFuncDesc) retainableTsOp.getConf().getFilterExpr();
                    }
                    if (discardableTsOp.getConf().getFilterExpr() != null) {
                        // Push filter on top
                        pushFilterToTopOfTableScan(optimizerCache, discardableTsOp);
                        ExprNodeGenericFuncDesc tsExprNode = discardableTsOp.getConf().getFilterExpr();
                        if (exprNode != null && !exprNode.isSame(tsExprNode)) {
                            // We merge filters from previous scan by ORing with filters from current scan
                            if (exprNode.getGenericUDF() instanceof GenericUDFOPOr) {
                                List<ExprNodeDesc> newChildren = new ArrayList<>(
                                        exprNode.getChildren().size() + 1);
                                for (ExprNodeDesc childExprNode : exprNode.getChildren()) {
                                    if (childExprNode.isSame(tsExprNode)) {
                                        // We do not need to do anything, it is in the OR expression
                                        break;
                                    }
                                    newChildren.add(childExprNode);
                                }
                                if (exprNode.getChildren().size() == newChildren.size()) {
                                    newChildren.add(tsExprNode);
                                    exprNode = ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPOr(),
                                            newChildren);
                                }
                            } else {
                                exprNode = ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPOr(),
                                        Arrays.<ExprNodeDesc>asList(exprNode, tsExprNode));
                            }
                        }
                    }
                    // Replace filter
                    retainableTsOp.getConf().setFilterExpr(exprNode);
                    // Replace table scan operator
                    List<Operator<? extends OperatorDesc>> allChildren = Lists
                            .newArrayList(discardableTsOp.getChildOperators());
                    for (Operator<? extends OperatorDesc> op : allChildren) {
                        discardableTsOp.getChildOperators().remove(op);
                        op.replaceParent(discardableTsOp, retainableTsOp);
                        retainableTsOp.getChildOperators().add(op);
                    }

                    LOG.debug("Merging {} into {}", discardableTsOp, retainableTsOp);
                }

                // First we remove the input operators of the expression that
                // we are going to eliminate
                for (Operator<?> op : sr.discardableInputOps) {
                    OperatorUtils.removeOperator(op);
                    optimizerCache.removeOp(op);
                    removedOps.add(op);
                    // Remove DPP predicates
                    if (op instanceof ReduceSinkOperator) {
                        SemiJoinBranchInfo sjbi = pctx.getRsToSemiJoinBranchInfo().get(op);
                        if (sjbi != null && !sr.discardableOps.contains(sjbi.getTsOp())
                                && !sr.discardableInputOps.contains(sjbi.getTsOp())) {
                            GenTezUtils.removeSemiJoinOperator(pctx, (ReduceSinkOperator) op, sjbi.getTsOp());
                        }
                    } else if (op instanceof AppMasterEventOperator) {
                        DynamicPruningEventDesc dped = (DynamicPruningEventDesc) op.getConf();
                        if (!sr.discardableOps.contains(dped.getTableScan())
                                && !sr.discardableInputOps.contains(dped.getTableScan())) {
                            GenTezUtils.removeSemiJoinOperator(pctx, (AppMasterEventOperator) op,
                                    dped.getTableScan());
                        }
                    }
                    LOG.debug("Input operator removed: {}", op);
                }
                // Then we merge the operators of the works we are going to merge
                optimizerCache.removeOpAndCombineWork(discardableTsOp, retainableTsOp);
                removedOps.add(discardableTsOp);
                // Finally we remove the expression from the tree
                for (Operator<?> op : sr.discardableOps) {
                    OperatorUtils.removeOperator(op);
                    optimizerCache.removeOp(op);
                    removedOps.add(op);
                    if (sr.discardableOps.size() == 1) {
                        // If there is a single discardable operator, it is a TableScanOperator
                        // and it means that we have merged filter expressions for it. Thus, we
                        // might need to remove DPP predicates from the retainable TableScanOperator
                        Collection<Operator<?>> c = optimizerCache.tableScanToDPPSource
                                .get((TableScanOperator) op);
                        for (Operator<?> dppSource : c) {
                            if (dppSource instanceof ReduceSinkOperator) {
                                GenTezUtils.removeSemiJoinOperator(pctx, (ReduceSinkOperator) dppSource,
                                        (TableScanOperator) sr.retainableOps.get(0));
                            } else if (dppSource instanceof AppMasterEventOperator) {
                                GenTezUtils.removeSemiJoinOperator(pctx, (AppMasterEventOperator) dppSource,
                                        (TableScanOperator) sr.retainableOps.get(0));
                            }
                        }
                    }
                    LOG.debug("Operator removed: {}", op);
                }

                break;
            }

            if (removedOps.contains(discardableTsOp)) {
                // This operator has been removed, remove it from the list of existing operators
                existingOps.remove(tableName, discardableTsOp);
            } else {
                // This operator has not been removed, include it in the list of existing operators
                existingOps.put(tableName, discardableTsOp);
            }
        }
    }

    // Remove unused table scan operators
    Iterator<Entry<String, TableScanOperator>> it = topOps.entrySet().iterator();
    while (it.hasNext()) {
        Entry<String, TableScanOperator> e = it.next();
        if (e.getValue().getNumChild() == 0) {
            it.remove();
        }
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("After SharedWorkOptimizer:\n" + Operator.toString(pctx.getTopOps().values()));
    }

    return pctx;
}
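
Here existingOps is a working set keyed by table name: once a table scan has been merged away, remove(key, value) drops it so later iterations never try to merge against a dead operator. A minimal sketch of that bookkeeping (table and operator labels are placeholders):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class WorkingSetDemo {
    public static void main(String[] args) {
        Multimap<String, String> existingOps = ArrayListMultimap.create();
        existingOps.put("db.tbl", "TS[0]"); // hypothetical operator labels
        existingOps.put("db.tbl", "TS[1]");

        // TS[1] was merged into TS[0]; drop it from the working set
        existingOps.remove("db.tbl", "TS[1]");

        // only live candidates remain for later merge attempts
        System.out.println(existingOps.get("db.tbl")); // [TS[0]]
    }
}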