Example usage for java.util.BitSet BitSet()

List of usage examples for java.util.BitSet BitSet()

Introduction

On this page you can find example usage of the java.util.BitSet no-argument constructor, BitSet().

Prototype

public BitSet() 

Document

Creates a new bit set. All bits are initially false.
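
A minimal, self-contained sketch of what the no-argument constructor gives you (the class name is illustrative):

import java.util.BitSet;

public class BitSetDemo {
    public static void main(String[] args) {
        BitSet bits = new BitSet();              // all bits start out false
        bits.set(3);
        bits.set(64);                            // backing storage grows on demand
        System.out.println(bits.get(3));         // true
        System.out.println(bits.get(10));        // false
        System.out.println(bits.cardinality());  // 2
        System.out.println(bits.length());       // 65 = index of highest set bit + 1
    }
}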

Usage

From source file:com.opengamma.analytics.financial.model.volatility.surface.VolatilitySurfaceFitter.java

public LeastSquareResultsWithTransform solve(final DoubleMatrix1D start) {
    final LeastSquareResults lsRes = SOLVER.solve(_vols, _errors, getModelValueFunction(),
            getModelJacobianFunction(), start);
    return new LeastSquareResultsWithTransform(lsRes,
            new UncoupledParameterTransforms(start, getTransforms(), new BitSet()));
}
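
In this snippet the empty BitSet handed to UncoupledParameterTransforms appears to mark which model parameters are held fixed during the fit, so leaving it empty lets every parameter vary. A small sketch of that idiom, assuming that interpretation (the parameter index is hypothetical):

import java.util.BitSet;

public class FixedParameterFlags {
    public static void main(String[] args) {
        BitSet fixed = new BitSet();   // empty: no parameter is fixed
        // fixed.set(0);               // hypothetically pin the first parameter instead
        System.out.println("fixed parameters: " + fixed.cardinality());
    }
}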

From source file:dr.app.tools.AntigenicPlotter.java

public AntigenicPlotter(int burnin, boolean tabFormat, boolean discreteModel, final String inputFileName,
        final String treeFileName, final String outputFileName) throws IOException {

    double[][] reference = null;
    List<String> tipLabels = null;

    if (treeFileName != null) {
        System.out.println("Reading tree file...");

        NexusImporter importer = new NexusImporter(new FileReader(treeFileName));
        try {
            Tree tree = importer.importNextTree();

            reference = new double[tree.getExternalNodeCount()][2];
            tipLabels = new ArrayList<String>();

            for (int i = 0; i < tree.getExternalNodeCount(); i++) {
                NodeRef tip = tree.getExternalNode(i);
                tipLabels.add(tree.getNodeTaxon(tip).getId());

                reference[i][0] = (Double) tree.getNodeAttribute(tip, "antigenic1");
                reference[i][1] = (Double) tree.getNodeAttribute(tip, "antigenic2");
            }
        } catch (Importer.ImportException e) {
            e.printStackTrace();
            return;
        }
    }

    System.out.println("Reading log file...");

    FileReader fileReader = new FileReader(inputFileName);
    try {
        File file = new File(inputFileName);

        LogFileTraces traces = new LogFileTraces(inputFileName, file);
        traces.loadTraces();

        if (burnin == -1) {
            burnin = (int) (traces.getMaxState() / 10);
        }

        traces.setBurnIn(burnin);

        System.out.println();
        System.out.println("burnIn   <= " + burnin);
        System.out.println("maxState  = " + traces.getMaxState());
        System.out.println();

        int traceCount = traces.getTraceCount();
        if (discreteModel) {
            // for the discrete model there are 4 sets of traces: pairs of coordinates, cluster allocations, and cluster sizes
            traceCount /= 4;
        } else {
            // for continuous, just pairs of coordinates
            traceCount /= 2;
        }

        int stateCount = traces.getStateCount();

        double[][][] data;
        String[] labels = new String[traceCount];

        if (tipLabels != null) {
            data = new double[stateCount][tipLabels.size()][2];
        } else {
            data = new double[stateCount][traceCount][2];
        }

        for (int i = 0; i < traceCount; i++) {
            String name = traces.getTraceName(i * 2);
            name = name.substring(0, name.length() - 1);

            if (tipLabels != null) {
                int index = tipLabels.indexOf(name);
                if (index != -1) {
                    for (int j = 0; j < stateCount; j++) {
                        data[j][index][0] = traces.getStateValue(i * 2, j);
                        data[j][index][1] = traces.getStateValue((i * 2) + 1, j);
                    }
                }
            } else {
                for (int j = 0; j < stateCount; j++) {
                    data[j][i][0] = traces.getStateValue(i * 2, j);
                    data[j][i][1] = traces.getStateValue((i * 2) + 1, j);
                }
                labels[i] = name;
            }
        }

        int[][] clusterIndices = null;
        int[][] clusterSizes = null;

        if (discreteModel) {
            clusterIndices = new int[stateCount][traceCount];
            clusterSizes = new int[stateCount][traceCount];

            for (int i = 0; i < traceCount; i++) {
                for (int j = 0; j < stateCount; j++) {
                    clusterIndices[j][i] = (int) traces.getStateValue((traceCount * 2) + i, j);
                    clusterSizes[j][i] = (int) traces.getStateValue((traceCount * 3) + i, j);
                }
            }

            Map<BitSet, Integer> clusterMap = new HashMap<BitSet, Integer>();

            for (int i = 0; i < stateCount; i++) {
                BitSet[] clusters = new BitSet[clusterIndices[i].length];
                for (int j = 0; j < clusterIndices[i].length; j++) {
                    BitSet bits = clusters[clusterIndices[i][j]];

                    if (bits == null) {
                        bits = new BitSet();
                        clusters[clusterIndices[i][j]] = bits;
                    }
                    bits.set(j);

                    Integer count = clusterMap.get(bits);
                    if (count == null) {
                        count = 0;
                    }
                    clusterMap.put(bits, count + 1);
                }

                Arrays.sort(clusters, new Comparator<BitSet>() {
                    public int compare(BitSet bitSet1, BitSet bitSet2) {
                        if (bitSet1 == null) {
                            return -1;
                        }
                        if (bitSet2 == null) {
                            return 1;
                        }
                        return bitSet2.cardinality() - bitSet1.cardinality();
                    }
                });
            }

            for (BitSet bits : clusterMap.keySet()) {
                int count = clusterMap.get(bits);
                if (count > 1) {
                    System.out.print(count);
                    for (int i = bits.nextSetBit(0); i >= 0; i = bits.nextSetBit(i + 1)) {
                        System.out.print("\t" + labels[i]);
                    }
                    System.out.println();
                }
            }
        }

        if (tipLabels != null) {
            labels = new String[tipLabels.size()];
            tipLabels.toArray(labels);
        }

        if (reference != null) {
            procrustinate(data, reference);
        } else {
            procrustinate(data);
        }

        if (tabFormat) {
            writeTabformat(outputFileName, labels, data);
        } else {
            if (discreteModel) {
                writeKML(outputFileName, labels, data, clusterIndices, clusterSizes);
            } else {
                writeKML(outputFileName, labels, data);
            }
        }

    } catch (Exception e) {
        System.err.println("Error Parsing Input File: " + e.getMessage());

        e.printStackTrace(System.err);
        return;
    }
    fileReader.close();

}
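
The cluster bookkeeping above leans on the fact that BitSet defines content-based equals and hashCode, so identical membership patterns collapse onto one HashMap key. A stripped-down, self-contained sketch of that pattern with hypothetical assignment data:

import java.util.BitSet;
import java.util.HashMap;
import java.util.Map;

public class ClusterKeyDemo {
    public static void main(String[] args) {
        // Two sampled states assigning three items to clusters (hypothetical data).
        int[][] assignments = { { 0, 0, 1 }, { 0, 0, 1 } };

        Map<BitSet, Integer> counts = new HashMap<>();
        for (int[] state : assignments) {
            BitSet[] clusters = new BitSet[state.length];
            for (int item = 0; item < state.length; item++) {
                if (clusters[state[item]] == null) {
                    clusters[state[item]] = new BitSet();
                }
                clusters[state[item]].set(item);   // record this item's membership
            }
            // Count each finished membership pattern; equal patterns hash to the same key.
            for (BitSet members : clusters) {
                if (members != null) {
                    counts.merge(members, 1, Integer::sum);
                }
            }
        }
        System.out.println(counts);   // identical membership patterns share one key
    }
}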

From source file:org.apache.tez.runtime.library.output.OrderedPartitionedKVOutput.java

protected List<Event> generateEventsOnClose() throws IOException {
    DataMovementEventPayloadProto.Builder payloadBuilder = DataMovementEventPayloadProto.newBuilder();

    boolean outputGenerated = true;
    if (sendEmptyPartitionDetails) {
        Path indexFile = sorter.getMapOutput().getOutputIndexFile();
        TezSpillRecord spillRecord = new TezSpillRecord(indexFile, conf);
        BitSet emptyPartitionDetails = new BitSet();
        int emptyPartitions = 0;
        for (int i = 0; i < spillRecord.size(); i++) {
            TezIndexRecord indexRecord = spillRecord.getIndex(i);
            if (!indexRecord.hasData()) {
                emptyPartitionDetails.set(i);
                emptyPartitions++;
            }
        }
        outputGenerated = (spillRecord.size() != emptyPartitions);
        if (emptyPartitions > 0) {
            ByteString emptyPartitionsBytesString = TezCommonUtils
                    .compressByteArrayToByteString(TezUtilsInternal.toByteArray(emptyPartitionDetails));
            payloadBuilder.setEmptyPartitions(emptyPartitionsBytesString);
            LOG.info("EmptyPartition bitsetSize=" + emptyPartitionDetails.cardinality() + ", numOutputs="
                    + getNumPhysicalOutputs() + ", emptyPartitions=" + emptyPartitions + ", compressedSize="
                    + emptyPartitionsBytesString.size());
        }
    }
    if (!sendEmptyPartitionDetails || outputGenerated) {
        String host = System.getenv(ApplicationConstants.Environment.NM_HOST.toString());
        ByteBuffer shuffleMetadata = getContext()
                .getServiceProviderMetaData(ShuffleUtils.SHUFFLE_HANDLER_SERVICE_ID);
        int shufflePort = ShuffleUtils.deserializeShuffleProviderMetaData(shuffleMetadata);
        payloadBuilder.setHost(host);
        payloadBuilder.setPort(shufflePort);
        payloadBuilder.setPathComponent(getContext().getUniqueIdentifier());
    }

    payloadBuilder.setRunDuration((int) ((endTime - startTime) / 1000));
    DataMovementEventPayloadProto payloadProto = payloadBuilder.build();
    ByteBuffer payload = payloadProto.toByteString().asReadOnlyByteBuffer();

    long outputSize = getContext().getCounters().findCounter(TaskCounter.OUTPUT_BYTES).getValue();
    VertexManagerEventPayloadProto.Builder vmBuilder = VertexManagerEventPayloadProto.newBuilder();
    vmBuilder.setOutputSize(outputSize);
    VertexManagerEvent vmEvent = VertexManagerEvent.create(getContext().getDestinationVertexName(),
            vmBuilder.build().toByteString().asReadOnlyByteBuffer());

    List<Event> events = Lists.newArrayListWithCapacity(getNumPhysicalOutputs() + 1);
    events.add(vmEvent);

    CompositeDataMovementEvent csdme = CompositeDataMovementEvent.create(0, getNumPhysicalOutputs(), payload);
    events.add(csdme);

    return events;
}
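
The empty-partition BitSet is shipped to consumers as compressed bytes via Tez helpers (TezUtilsInternal.toByteArray, TezCommonUtils.compressByteArrayToByteString); a plain-JDK round trip of the same idea might look like this sketch:

import java.util.Arrays;
import java.util.BitSet;

public class EmptyPartitionBits {
    public static void main(String[] args) {
        BitSet emptyPartitions = new BitSet();
        emptyPartitions.set(1);                       // hypothetical: partitions 1 and 4 produced no data
        emptyPartitions.set(4);

        byte[] wire = emptyPartitions.toByteArray();  // byte encoding of the set bits (JDK 7+)
        BitSet decoded = BitSet.valueOf(wire);        // reconstruct on the receiving side

        System.out.println(Arrays.toString(wire));
        System.out.println(decoded.equals(emptyPartitions));  // true
    }
}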

From source file:org.lockss.plugin.base.TestDefaultUrlCacher.java

void setSuppressValidation(UrlCacher uc) {
    BitSet fetchFlags = new BitSet();
    fetchFlags.set(UrlCacher.SUPPRESS_CONTENT_VALIDATION);
    uc.setFetchFlags(fetchFlags);
}
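
Here the fresh BitSet acts as a bag of boolean options keyed by integer constants; setting a bit switches the corresponding option on. A tiny self-contained sketch of the same idiom with hypothetical flag indices:

import java.util.BitSet;

public class FetchFlagsSketch {
    // Hypothetical flag indices; in the example above the constant lives on UrlCacher.
    static final int SUPPRESS_CONTENT_VALIDATION = 0;
    static final int SOME_OTHER_OPTION = 1;

    public static void main(String[] args) {
        BitSet fetchFlags = new BitSet();              // all options off by default
        fetchFlags.set(SUPPRESS_CONTENT_VALIDATION);   // switch one option on
        System.out.println(fetchFlags.get(SUPPRESS_CONTENT_VALIDATION));  // true
        System.out.println(fetchFlags.get(SOME_OTHER_OPTION));            // false
    }
}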

From source file:com.opengamma.analytics.financial.model.volatility.smile.fitting.SmileModelFitterTest.java

public void horribleMarketDataTest() {
    final double forward = 0.0059875;
    final double[] strikes = new double[] { 0.0012499999999999734, 0.0024999999999999467, 0.003750000000000031,
            0.0050000000000000044, 0.006249999999999978, 0.007499999999999951, 0.008750000000000036,
            0.010000000000000009, 0.011249999999999982, 0.012499999999999956, 0.01375000000000004,
            0.015000000000000013, 0.016249999999999987, 0.01749999999999996, 0.018750000000000044,
            0.020000000000000018, 0.02124999999999999, 0.022499999999999964, 0.02375000000000005,
            0.025000000000000022, 0.026249999999999996, 0.02749999999999997, 0.028750000000000053,
            0.030000000000000027 };
    final double expiry = 0.09041095890410959;
    final double[] vols = new double[] { 2.7100433855959642, 1.5506135190088546, 0.9083977239618538,
            0.738416513934868, 0.8806973450124451, 1.0906290439592792, 1.2461975189027226, 1.496275983572826,
            1.5885915338673156, 1.4842142974195722, 1.7667347426399058, 1.4550288621444052, 1.0651798188736166,
            1.143318270172714, 1.216215092528441, 1.2845258218014657, 1.3488224665755535, 1.9259326343836376,
            1.9868728791190922, 2.0441767092857317, 2.0982583238541026, 2.1494622372820675, 2.198020785622251,
            2.244237863291375 };
    final int n = strikes.length;
    final double[] errors = new double[n];
    Arrays.fill(errors, 0.01); //1% error
    final SmileModelFitter<T> fitter = getFitter(forward, strikes, expiry, vols, errors, getModel());
    LeastSquareResults best = null;
    final BitSet fixed = new BitSet();
    for (int i = 0; i < 5; i++) {
        final double[] start = getRandomStartValues();

        //   int nStartPoints = start.length;
        final LeastSquareResults lsRes = fitter.solve(new DoubleMatrix1D(start), fixed);
        //     System.out.println(this.toString() + lsRes.toString());
        if (best == null) {
            best = lsRes;
        } else {
            if (lsRes.getChiSq() < best.getChiSq()) {
                best = lsRes;
            }
        }
    }
    //
    //    Function1D<DoubleMatrix1D, DoubleMatrix2D> jacFunc = fitter.getModelJacobianFunction();
    //    System.out.println("model Jac: " + jacFunc.evaluate(best.getParameters()));
    //    System.out.println("fit invJac: " + best.getInverseJacobian());
    //    System.out.println("best" + this.toString() + best.toString());
    if (best != null) {
        assertTrue("chi square", best.getChiSq() < 24000); //average error 31.6% - not a good fit, but the data is horrible
    }
}

From source file:sf.net.experimaestro.manager.plans.Plan.java

/**
 * Returns the graph corresponding to this plan
 *
 * @param map The current plan path (containing joins in input, and operators in output)
 * @return The node that is the root (sink) of the DAG
 */
public synchronized Operator prepare(Map<Operator, Operator> map, OperatorMap opMap) {
    // Check if a plan was not already generated
    Operator old = map.get(this);
    if (old != null)
        return old;

    // Outputs will contain the list of operators that have
    // to be merged (because we have a series of different inputs)
    ArrayList<Operator> outputs = new ArrayList<>();

    for (Multimap<DotName, Operator> inputs : inputsList) {
        TaskOperator self = new TaskOperator(this);

        if (inputs.isEmpty()) {
            self.addParent(new Constant(JsonNull.getSingleton()));
            self.setMappings(ImmutableMap.of());
            outputs.add(self);
        } else {
            // --- Loop over the cartesian product of the inputs
            DotName ids[] = new DotName[inputs.keySet().size()];
            OperatorIterable inputValues[] = new OperatorIterable[inputs.keySet().size()];
            {

                int index = 0;
                for (Map.Entry<DotName, Collection<Operator>> input : inputs.asMap().entrySet()) {
                    ids[index] = input.getKey();
                    inputValues[index] = new OperatorIterable(input.getValue(), map, opMap);
                    index++;
                }
                assert index == ids.length;
            }

            // Create a new operator
            Operator inputOperators[] = new Operator[inputValues.length];

            for (int i = inputValues.length; --i >= 0;) {
                OperatorIterable values = inputValues[i];
                Union union = new Union();
                for (Operator operator : values) {
                    union.addParent(operator);
                }

                if (union.getParents().size() == 1)
                    inputOperators[i] = union.getParent(0);
                else
                    inputOperators[i] = union;

                opMap.add(inputOperators[i]);

            }

            // Find LCAs and store them in a map operator ID -> inputs
            // joins contains the list of pairwise LCAs in the operator
            // graph above
            BitSet[] joins = new BitSet[inputOperators.length];
            for (int i = 0; i < joins.length; i++) {
                joins[i] = new BitSet();
            }

            for (int i = 0; i < ids.length - 1; i++) {
                for (int j = i + 1; j < ids.length; j++) {
                    ArrayList<Operator> lca = opMap.findLCAs(inputOperators[i], inputOperators[j]);
                    for (Operator operator : lca) {
                        int key = opMap.get(operator);
                        joins[i].set(key);
                        joins[j].set(key);
                    }
                }
            }

            Lattice lattice = new Lattice(opMap);
            for (int i = 0; i < joins.length; i++) {
                lattice.add(joins[i], inputOperators[i]);
            }
            LatticeNode.MergeResult merge = lattice.merge();

            self.addParent(merge.operator);

            // Associate streams with names
            Map<DotName, Integer> mappings = new TreeMap<>();
            for (int i = 0; i < ids.length; i++) {
                mappings.put(ids[i], merge.map.get(inputOperators[i]));
            }
            self.setMappings(mappings);

            // --- Handle group by

            outputs.add(self);
        }
    }

    // End of loop over inputs

    Operator planOperator;
    if (outputs.size() == 1) {
        map.put(this, outputs.get(0));
        planOperator = outputs.get(0);
    } else {
        Union union = new Union();
        map.put(this, union);
        for (Operator output : outputs)
            union.addParent(output);
        planOperator = union;
    }

    return planOperator;

}

From source file:org.asoem.greyfish.utils.collect.BitString.java

public static BitString create(final Iterable<Boolean> val) {
    checkNotNull(val);
    final BitSet bs = new BitSet();
    int idx = 0;
    int cardinality = 0;
    for (final boolean b : val) {
        bs.set(idx++, b);
        if (b) {
            ++cardinality;
        }
    }
    return create(bs, idx, cardinality);
}
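
Assuming the greyfish library is on the classpath, a caller would feed any Iterable<Boolean> to this factory; a hypothetical usage sketch (the printed form depends on BitString#toString):

import java.util.Arrays;

import org.asoem.greyfish.utils.collect.BitString;

public class BitStringUsage {
    public static void main(String[] args) {
        BitString s = BitString.create(Arrays.asList(true, false, true, true));
        System.out.println(s);
    }
}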

From source file:com.bittorrent.mpetazzoni.client.SharedTorrent.java

/**
 * Create a new shared torrent from meta-info binary data.
 *
 * @param torrent The meta-info byte data.
 * @param parent The parent directory or location the torrent files.
 * @param seeder Whether we're a seeder for this torrent or not (disables
 * validation).
 * @throws FileNotFoundException If the torrent file location or
 * destination directory does not exist and can't be created.
 * @throws IOException If the torrent file cannot be read or decoded.
 */
public SharedTorrent(byte[] torrent, File parent, boolean seeder) throws FileNotFoundException, IOException {
    super(torrent, seeder);

    if (parent == null || !parent.isDirectory()) {
        throw new IllegalArgumentException("Invalid parent directory!");
    }

    String parentPath = parent.getCanonicalPath();

    try {
        this.pieceLength = this.decoded_info.get("piece length").getInt();
        this.piecesHashes = ByteBuffer.wrap(this.decoded_info.get("pieces").getBytes());

        if (this.piecesHashes.capacity() / Torrent.PIECE_HASH_SIZE * (long) this.pieceLength < this.getSize()) {
            throw new IllegalArgumentException(
                    "Torrent size does not " + "match the number of pieces and the piece size!");
        }
    } catch (InvalidBEncodingException ibee) {
        throw new IllegalArgumentException("Error reading torrent meta-info fields!");
    }

    List<FileStorage> files = new LinkedList<FileStorage>();
    long offset = 0L;
    for (Torrent.TorrentFile file : this.files) {
        File actual = new File(parent, file.file.getPath());

        if (!actual.getCanonicalPath().startsWith(parentPath)) {
            throw new SecurityException("Torrent file path attempted " + "to break directory jail!");
        }

        actual.getParentFile().mkdirs();
        files.add(new FileStorage(actual, offset, file.size));
        offset += file.size;
    }
    this.bucket = new FileCollectionStorage(files, this.getSize());

    this.random = new Random(System.currentTimeMillis());
    this.stop = false;

    this.uploaded = 0;
    this.downloaded = 0;
    this.left = this.getSize();

    this.initialized = false;
    this.pieces = new Piece[0];
    this.rarest = Collections.synchronizedSortedSet(new TreeSet<Piece>());
    this.completedPieces = new BitSet();
    this.requestedPieces = new BitSet();
}
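
completedPieces and requestedPieces start out empty and are presumably filled in as pieces are requested and verified, which makes download progress a simple cardinality computation. A sketch of that bookkeeping with a hypothetical piece count:

import java.util.BitSet;

public class PieceTracking {
    public static void main(String[] args) {
        int pieceCount = 8;                      // hypothetical torrent with 8 pieces
        BitSet completedPieces = new BitSet();
        BitSet requestedPieces = new BitSet();

        requestedPieces.set(2);                  // piece 2 has been requested from a peer
        completedPieces.set(2);                  // ... and later verified
        requestedPieces.clear(2);

        double done = 100.0 * completedPieces.cardinality() / pieceCount;
        System.out.println("completed: " + done + "%");   // completed: 12.5%
    }
}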

From source file:org.lockss.servlet.DisplayContentStatus.java

/**
 * Handle a request
 *
 * @throws IOException
 */
public void lockssHandleRequest() throws IOException {
    if (!StringUtil.isNullString(req.getParameter("isDaemonReady"))) {
        if (pluginMgr.areAusStarted()) {
            resp.setStatus(200);
            PrintWriter wrtr = resp.getWriter();
            resp.setContentType("text/plain");
            wrtr.println("true");
        } else {
            PrintWriter wrtr = resp.getWriter();
            resp.setContentType("text/plain");
            wrtr.println("false");
            resp.sendError(202, "Not ready");
        }
        return;
    }

    action = req.getParameter(ACTION_TAG);
    auName = req.getParameter(AU_TO_REMOVE);
    if ("Delete selected".equals(req.getParameter("submit"))) {
        String[] deleteAUs = req.getParameterValues("deleteAu");
        if (deleteAUs != null) {
            log.error("AUs: " + Arrays.asList(deleteAUs));
            doRemoveAus(Arrays.asList(deleteAUs));
        } else {
            log.error("No AUs selected");
            deleteMessage = "No AUs selected!";
        }
    }

    String publisher = req.getParameter("deletePublisher");
    if (!StringUtil.isNullString(publisher)) {
        TreeMap<String, TreeMap<String, TreeSet<ArchivalUnit>>> auMap = DisplayContentTab
                .getAusByPublisherName();
        ArrayList<String> auIds = new ArrayList<String>();
        if (auMap.containsKey(publisher)) {
            Iterator it = auMap.entrySet().iterator();
            while (it.hasNext()) {
                Map.Entry pairs = (Map.Entry) it.next();
                String publisherString = pairs.getKey().toString();
                log.error("Publisher: " + publisher);
                log.error("Publisher string: " + publisherString);
                if (publisher.equals(publisherString)) {
                    TreeMap<String, TreeSet<ArchivalUnit>> titleMap = (TreeMap<String, TreeSet<ArchivalUnit>>) pairs
                            .getValue();
                    Iterator titleIterator = titleMap.entrySet().iterator();
                    while (titleIterator.hasNext()) {
                        Map.Entry titlePairs = (Map.Entry) titleIterator.next();
                        TreeSet<ArchivalUnit> auSet = (TreeSet<ArchivalUnit>) titlePairs.getValue();
                        for (ArchivalUnit au : auSet) {
                            auIds.add(au.getAuId());
                        }
                    }
                }
            }
            doRemoveAus(auIds);
        }
    }

    if (action != null && auName != null) {
        String auString = URLDecoder.decode(auName, "UTF-8");
        java.util.List<String> auList = new ArrayList<String>();
        auList.add(auString);
        doRemoveAus(auList);
    }

    if (StringUtil.isNullString(action)) {
        try {
            getMultiPartRequest();
            if (multiReq != null) {
                action = multiReq.getString(ACTION_TAG);
            }
        } catch (FormDataTooLongException e) {
            errMsg = "Uploaded file too large: " + e.getMessage();
            // leave action null, will call displayAuSummary() below
        }
    }

    outputFmt = OUTPUT_HTML; // default output is html

    String outputParam = req.getParameter("output");
    if (!StringUtil.isNullString(outputParam)) {
        if ("html".equalsIgnoreCase(outputParam)) {
            outputFmt = OUTPUT_HTML;
        } else if ("xml".equalsIgnoreCase(outputParam)) {
            outputFmt = OUTPUT_XML;
        } else if ("text".equalsIgnoreCase(outputParam)) {
            outputFmt = OUTPUT_TEXT;
        } else if ("csv".equalsIgnoreCase(outputParam)) {
            outputFmt = OUTPUT_CSV;
        } else {
            log.warning("Unknown output format: " + outputParam);
        }
    }
    String optionsParam = req.getParameter("options");

    tableOptions = new BitSet();

    if (isDebugUser()) {
        log.debug2("Debug user.  Setting OPTION_DEBUG_USER");
        tableOptions.set(StatusTable.OPTION_DEBUG_USER);
    }

    for (Iterator iter = StringUtil.breakAt(optionsParam, ',').iterator(); iter.hasNext();) {
        String s = (String) iter.next();
        if ("norows".equalsIgnoreCase(s)) {
            tableOptions.set(StatusTable.OPTION_NO_ROWS);
        }
    }

    tableName = req.getParameter("table");
    tableKey = req.getParameter("key");
    if (StringUtil.isNullString(tableName)) {
        tableName = "AuOverview";
    }
    if (StringUtil.isNullString(tableKey)) {
        tableKey = null;
    }
    sortKey = req.getParameter("sort");
    if (StringUtil.isNullString(sortKey)) {
        sortKey = null;
    }
    groupKey = req.getParameter("group");
    if (StringUtil.isNullString(groupKey)) {
        groupKey = "publisher";
    }
    typeKey = req.getParameter("type");
    filterKey = req.getParameter("filterKey");
    tabKey = req.getParameter("tab");
    timeKey = req.getParameter("timeKey");

    switch (outputFmt) {
    case OUTPUT_HTML:
        doHtmlStatusTable();
        break;
    case OUTPUT_XML:
        try {
            doXmlStatusTable();
        } catch (Exception e) {
            throw new IOException("Error building XML", e);
        }
        break;
    case OUTPUT_TEXT:
        doTextStatusTable();
        break;
    case OUTPUT_CSV:
        doCsvStatusTable();
        break;
    }
}

From source file:org.apache.drill.exec.planner.logical.partition.PruneScanRule.java

protected void doOnMatch(RelOptRuleCall call, Filter filterRel, Project projectRel, TableScan scanRel) {

    final String pruningClassName = getClass().getName();
    logger.info("Beginning partition pruning, pruning class: {}", pruningClassName);
    Stopwatch totalPruningTime = Stopwatch.createStarted();

    final PlannerSettings settings = PrelUtil.getPlannerSettings(call.getPlanner());
    PartitionDescriptor descriptor = getPartitionDescriptor(settings, scanRel);
    final BufferAllocator allocator = optimizerContext.getAllocator();

    final Object selection = getDrillTable(scanRel).getSelection();
    MetadataContext metaContext = null;
    if (selection instanceof FormatSelection) {
        metaContext = ((FormatSelection) selection).getSelection().getMetaContext();
    }

    RexNode condition = null;
    if (projectRel == null) {
        condition = filterRel.getCondition();
    } else {
        // get the filter as if it were below the projection.
        condition = RelOptUtil.pushFilterPastProject(filterRel.getCondition(), projectRel);
    }

    RewriteAsBinaryOperators visitor = new RewriteAsBinaryOperators(true,
            filterRel.getCluster().getRexBuilder());
    condition = condition.accept(visitor);

    Map<Integer, String> fieldNameMap = Maps.newHashMap();
    List<String> fieldNames = scanRel.getRowType().getFieldNames();
    BitSet columnBitset = new BitSet();
    BitSet partitionColumnBitSet = new BitSet();
    Map<Integer, Integer> partitionMap = Maps.newHashMap();

    int relColIndex = 0;
    for (String field : fieldNames) {
        final Integer partitionIndex = descriptor.getIdIfValid(field);
        if (partitionIndex != null) {
            fieldNameMap.put(partitionIndex, field);
            partitionColumnBitSet.set(partitionIndex);
            columnBitset.set(relColIndex);
            // mapping between the relColIndex and partitionIndex
            partitionMap.put(relColIndex, partitionIndex);
        }
        relColIndex++;
    }

    if (partitionColumnBitSet.isEmpty()) {
        logger.info("No partition columns are projected from the scan..continue. "
                + "Total pruning elapsed time: {} ms", totalPruningTime.elapsed(TimeUnit.MILLISECONDS));
        setPruneStatus(metaContext, PruneStatus.NOT_PRUNED);
        return;
    }

    // stop watch to track how long we spend in different phases of pruning
    Stopwatch miscTimer = Stopwatch.createUnstarted();

    // track how long we spend building the filter tree
    miscTimer.start();

    FindPartitionConditions c = new FindPartitionConditions(columnBitset,
            filterRel.getCluster().getRexBuilder());
    c.analyze(condition);
    RexNode pruneCondition = c.getFinalCondition();
    BitSet referencedDirsBitSet = c.getReferencedDirs();

    logger.info("Total elapsed time to build and analyze filter tree: {} ms",
            miscTimer.elapsed(TimeUnit.MILLISECONDS));
    miscTimer.reset();

    if (pruneCondition == null) {
        logger.info("No conditions were found eligible for partition pruning."
                + "Total pruning elapsed time: {} ms", totalPruningTime.elapsed(TimeUnit.MILLISECONDS));
        setPruneStatus(metaContext, PruneStatus.NOT_PRUNED);
        return;
    }

    // set up the partitions
    List<PartitionLocation> newPartitions = Lists.newArrayList();
    long numTotal = 0; // total number of partitions
    int batchIndex = 0;
    PartitionLocation firstLocation = null;
    LogicalExpression materializedExpr = null;
    String[] spInfo = null;
    int maxIndex = -1;
    BitSet matchBitSet = new BitSet();

    // Outer loop: iterate over a list of batches of PartitionLocations
    for (List<PartitionLocation> partitions : descriptor) {
        numTotal += partitions.size();
        logger.debug("Evaluating partition pruning for batch {}", batchIndex);
        if (batchIndex == 0) { // save the first location in case everything is pruned
            firstLocation = partitions.get(0);
        }
        final NullableBitVector output = new NullableBitVector(
                MaterializedField.create("", Types.optional(MinorType.BIT)), allocator);
        final VectorContainer container = new VectorContainer();

        try {
            final ValueVector[] vectors = new ValueVector[descriptor.getMaxHierarchyLevel()];
            for (int partitionColumnIndex : BitSets.toIter(partitionColumnBitSet)) {
                SchemaPath column = SchemaPath.getSimplePath(fieldNameMap.get(partitionColumnIndex));
                MajorType type = descriptor.getVectorType(column, settings);
                MaterializedField field = MaterializedField.create(column.getAsUnescapedPath(), type);
                ValueVector v = TypeHelper.getNewVector(field, allocator);
                v.allocateNew();
                vectors[partitionColumnIndex] = v;
                container.add(v);
            }

            // track how long we spend populating partition column vectors
            miscTimer.start();

            // populate partition vectors.
            descriptor.populatePartitionVectors(vectors, partitions, partitionColumnBitSet, fieldNameMap);

            logger.info("Elapsed time to populate partitioning column vectors: {} ms within batchIndex: {}",
                    miscTimer.elapsed(TimeUnit.MILLISECONDS), batchIndex);
            miscTimer.reset();

            // materialize the expression; only need to do this once
            if (batchIndex == 0) {
                materializedExpr = materializePruneExpr(pruneCondition, settings, scanRel, container);
                if (materializedExpr == null) {
                    // continue without partition pruning; no need to log anything here since
                    // materializePruneExpr logs it already
                    logger.info("Total pruning elapsed time: {} ms",
                            totalPruningTime.elapsed(TimeUnit.MILLISECONDS));
                    setPruneStatus(metaContext, PruneStatus.NOT_PRUNED);
                    return;
                }
            }

            output.allocateNew(partitions.size());

            // start the timer to evaluate how long we spend in the interpreter evaluation
            miscTimer.start();

            InterpreterEvaluator.evaluate(partitions.size(), optimizerContext, container, output,
                    materializedExpr);

            logger.info(
                    "Elapsed time in interpreter evaluation: {} ms within batchIndex: {} with # of partitions : {}",
                    miscTimer.elapsed(TimeUnit.MILLISECONDS), batchIndex, partitions.size());
            miscTimer.reset();

            int recordCount = 0;
            int qualifiedCount = 0;

            if (descriptor.supportsMetadataCachePruning() && partitions.get(0)
                    .isCompositePartition() /* apply single partition check only for composite partitions */) {
                // Inner loop: within each batch iterate over the PartitionLocations
                for (PartitionLocation part : partitions) {
                    assert part.isCompositePartition();
                    if (!output.getAccessor().isNull(recordCount)
                            && output.getAccessor().get(recordCount) == 1) {
                        newPartitions.add(part);
                        // Rather than using the PartitionLocation, get the array of partition values for the directories that are
                        // referenced by the filter since we are not interested in directory references in other parts of the query.
                        Pair<String[], Integer> p = composePartition(referencedDirsBitSet, partitionMap,
                                vectors, recordCount);
                        String[] parts = p.getLeft();
                        int tmpIndex = p.getRight();
                        maxIndex = Math.max(maxIndex, tmpIndex);
                        if (spInfo == null) { // initialization
                            spInfo = parts;
                            for (int j = 0; j <= tmpIndex; j++) {
                                if (parts[j] != null) {
                                    matchBitSet.set(j);
                                }
                            }
                        } else {
                            // compare the new partition with existing partition
                            for (int j = 0; j <= tmpIndex; j++) {
                                if (parts[j] == null || spInfo[j] == null) { // nulls don't match
                                    matchBitSet.clear(j);
                                } else {
                                    if (!parts[j].equals(spInfo[j])) {
                                        matchBitSet.clear(j);
                                    }
                                }
                            }
                        }
                        qualifiedCount++;
                    }
                    recordCount++;
                }
            } else {
                // Inner loop: within each batch iterate over the PartitionLocations
                for (PartitionLocation part : partitions) {
                    if (!output.getAccessor().isNull(recordCount)
                            && output.getAccessor().get(recordCount) == 1) {
                        newPartitions.add(part);
                        qualifiedCount++;
                    }
                    recordCount++;
                }
            }
            logger.debug("Within batch {}: total records: {}, qualified records: {}", batchIndex, recordCount,
                    qualifiedCount);
            batchIndex++;
        } catch (Exception e) {
            logger.warn("Exception while trying to prune partition.", e);
            logger.info("Total pruning elapsed time: {} ms", totalPruningTime.elapsed(TimeUnit.MILLISECONDS));

            setPruneStatus(metaContext, PruneStatus.NOT_PRUNED);
            return; // continue without partition pruning
        } finally {
            container.clear();
            if (output != null) {
                output.clear();
            }
        }
    }

    try {
        if (newPartitions.size() == numTotal) {
            logger.info("No partitions were eligible for pruning");
            return;
        }

        // handle the case all partitions are filtered out.
        boolean canDropFilter = true;
        boolean wasAllPartitionsPruned = false;
        String cacheFileRoot = null;

        if (newPartitions.isEmpty()) {
            assert firstLocation != null;
            // Add the first non-composite partition location, since execution requires schema.
            // In such case, we should not drop filter.
            newPartitions.add(firstLocation.getPartitionLocationRecursive().get(0));
            canDropFilter = false;
            // NOTE: with DRILL-4530, the PruneScanRule may be called with only a list of
            // directories first and the non-composite partition location will still return
            // directories, not files.  So, additional processing is done depending on this flag
            wasAllPartitionsPruned = true;
            logger.info(
                    "All {} partitions were pruned; added back a single partition to allow creating a schema",
                    numTotal);

            // set the cacheFileRoot appropriately
            if (firstLocation.isCompositePartition()) {
                cacheFileRoot = descriptor.getBaseTableLocation() + firstLocation.getCompositePartitionPath();
            }
        }

        logger.info("Pruned {} partitions down to {}", numTotal, newPartitions.size());

        List<RexNode> conjuncts = RelOptUtil.conjunctions(condition);
        List<RexNode> pruneConjuncts = RelOptUtil.conjunctions(pruneCondition);
        conjuncts.removeAll(pruneConjuncts);
        RexNode newCondition = RexUtil.composeConjunction(filterRel.getCluster().getRexBuilder(), conjuncts,
                false);

        RewriteCombineBinaryOperators reverseVisitor = new RewriteCombineBinaryOperators(true,
                filterRel.getCluster().getRexBuilder());

        condition = condition.accept(reverseVisitor);
        pruneCondition = pruneCondition.accept(reverseVisitor);

        if (descriptor.supportsMetadataCachePruning() && !wasAllPartitionsPruned) {
            // if metadata cache file could potentially be used, then assign a proper cacheFileRoot
            int index = -1;
            if (!matchBitSet.isEmpty()) {
                String path = "";
                index = matchBitSet.length() - 1;

                for (int j = 0; j < matchBitSet.length(); j++) {
                    if (!matchBitSet.get(j)) {
                        // stop at the first index with no match and use the immediate
                        // previous index
                        index = j - 1;
                        break;
                    }
                }
                for (int j = 0; j <= index; j++) {
                    path += "/" + spInfo[j];
                }
                cacheFileRoot = descriptor.getBaseTableLocation() + path;
            }
            if (index != maxIndex) {
                // if multiple partitions are being selected, we should not drop the filter
                // since we are reading the cache file at a parent/ancestor level
                canDropFilter = false;
            }

        }

        RelNode inputRel = descriptor.supportsMetadataCachePruning()
                ? descriptor.createTableScan(newPartitions, cacheFileRoot, wasAllPartitionsPruned, metaContext)
                : descriptor.createTableScan(newPartitions, wasAllPartitionsPruned);

        if (projectRel != null) {
            inputRel = projectRel.copy(projectRel.getTraitSet(), Collections.singletonList(inputRel));
        }

        if (newCondition.isAlwaysTrue() && canDropFilter) {
            call.transformTo(inputRel);
        } else {
            final RelNode newFilter = filterRel.copy(filterRel.getTraitSet(),
                    Collections.singletonList(inputRel));
            call.transformTo(newFilter);
        }

        setPruneStatus(metaContext, PruneStatus.PRUNED);

    } catch (Exception e) {
        logger.warn("Exception while using the pruned partitions.", e);
    } finally {
        logger.info("Total pruning elapsed time: {} ms", totalPruningTime.elapsed(TimeUnit.MILLISECONDS));
    }
}
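
columnBitset, partitionColumnBitSet and matchBitSet all use bit indexes as lightweight sets of column or directory positions. The example iterates the set bits with BitSets.toIter; with the JDK alone the same walk can be done with nextSetBit or, on Java 8+, stream(), as in this sketch (the column indexes are hypothetical):

import java.util.BitSet;

public class PartitionColumnBits {
    public static void main(String[] args) {
        // Hypothetical row type with five columns; columns 1 and 3 are partition columns.
        BitSet partitionColumnBitSet = new BitSet();
        partitionColumnBitSet.set(1);
        partitionColumnBitSet.set(3);

        // Walk the set indexes with plain JDK calls.
        for (int i = partitionColumnBitSet.nextSetBit(0); i >= 0; i = partitionColumnBitSet.nextSetBit(i + 1)) {
            System.out.println("partition column index: " + i);
        }
        // Java 8+ alternative: partitionColumnBitSet.stream().forEach(System.out::println);
    }
}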