Example usage for java.util BitSet set

List of usage examples for java.util BitSet set

Introduction

On this page you can find example usages of java.util.BitSet.set.

Prototype

public void set(int bitIndex) 

Document

Sets the bit at the specified index to true.
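
For reference, a minimal self-contained sketch of the behavior described above (the class name and the chosen indices are illustrative only):

import java.util.BitSet;

public class BitSetSetExample {
    public static void main(String[] args) {
        BitSet bits = new BitSet(8); // the size argument is only a sizing hint; a BitSet grows as needed
        bits.set(3);                 // sets the bit at index 3 to true
        bits.set(5);

        System.out.println(bits.get(3));        // true
        System.out.println(bits.get(4));        // false
        System.out.println(bits.cardinality()); // 2 (number of bits set to true)
        System.out.println(bits);               // {3, 5}
    }
}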

Usage

From source file:model.DecomposableModel.java

/**
 * Compute the tree-width this model would have if an edge between the two
 * given vertices were added, i.e. the size of the clique formed by their
 * separator together with the two vertices themselves.
 *
 * @param a the first vertex
 * @param b the second vertex
 * @return the cardinality of the separator of a and b plus the two vertices
 */
public int treeWidthIfAdding(Integer a, Integer b) {
    // System.out.println("computing actual entropy");
    BitSet Sab = graph.getSeparator(a, b);
    BitSet Sabuaub = (BitSet) Sab.clone();
    Sabuaub.set(a);
    Sabuaub.set(b);
    return Sabuaub.cardinality();

}

From source file:org.wso2.andes.subscription.TopicSubscriptionBitMapStore.java

/**
 * @param constituentIndex The index to create the constituent for
 * @return The created constituent table
 */
private Map<String, BitSet> addConstituentTable(int constituentIndex) {
    Map<String, BitSet> constituentTable = new HashMap<String, BitSet>();

    BitSet nullBitSet = new BitSet(subscriptionList.size());
    BitSet otherBitSet = new BitSet(subscriptionList.size());

    // Fill null and other constituent values for all available subscriptions
    for (int subscriptionIndex = 0; subscriptionIndex < subscriptionList.size(); subscriptionIndex++) {
        String[] constituentsOfSubscription = subscriptionConstituents.get(subscriptionIndex);

        if (constituentsOfSubscription.length < constituentIndex + 1) {
            // There is no constituent in this subscription for this constituent index
            nullBitSet.set(subscriptionIndex);

            // If last constituent of the subscription is multiLevelWildCard, then any other is a match
            if (multiLevelWildCard.equals(constituentsOfSubscription[constituentsOfSubscription.length - 1])) {
                otherBitSet.set(subscriptionIndex);
            }
        } else {
            String subscriptionConstituent = constituentsOfSubscription[constituentIndex];

            // Check if this is a wildcard
            if (multiLevelWildCard.equals(subscriptionConstituent)
                    || singleLevelWildCard.equals(subscriptionConstituent)) {
                otherBitSet.set(subscriptionIndex);
            }
        }
    }

    // Add 'null' and 'other' constituent
    constituentTable.put(NULL_CONSTITUENT, nullBitSet);
    constituentTable.put(OTHER_CONSTITUENT, otherBitSet);

    constituentTables.add(constituentIndex, constituentTable);

    return constituentTable;
}

From source file:org.apache.hadoop.mapred.TestSequenceFileAsTextInputFormat.java

public void testFormat() throws Exception {
    JobConf job = new JobConf(conf);
    FileSystem fs = FileSystem.getLocal(conf);
    Path dir = new Path(System.getProperty("test.build.data", ".") + "/mapred");
    Path file = new Path(dir, "test.seq");

    Reporter reporter = Reporter.NULL;

    int seed = new Random().nextInt();
    //LOG.info("seed = "+seed);
    Random random = new Random(seed);

    fs.delete(dir, true);

    FileInputFormat.setInputPaths(job, dir);

    // for a variety of lengths
    for (int length = 0; length < MAX_LENGTH; length += random.nextInt(MAX_LENGTH / 10) + 1) {

        //LOG.info("creating; entries = " + length);

        // create a file with length entries
        SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, file, IntWritable.class,
                LongWritable.class);
        try {
            for (int i = 0; i < length; i++) {
                IntWritable key = new IntWritable(i);
                LongWritable value = new LongWritable(10 * i);
                writer.append(key, value);
            }
        } finally {
            writer.close();
        }

        // try splitting the file in a variety of sizes
        InputFormat<Text, Text> format = new SequenceFileAsTextInputFormat();

        for (int i = 0; i < 3; i++) {
            int numSplits = random.nextInt(MAX_LENGTH / (SequenceFile.SYNC_INTERVAL / 20)) + 1;
            //LOG.info("splitting: requesting = " + numSplits);
            InputSplit[] splits = format.getSplits(job, numSplits);
            //LOG.info("splitting: got =        " + splits.length);

            // check each split
            BitSet bits = new BitSet(length);
            for (int j = 0; j < splits.length; j++) {
                RecordReader<Text, Text> reader = format.getRecordReader(splits[j], job, reporter);
                Class readerClass = reader.getClass();
                assertEquals("reader class is SequenceFileAsTextRecordReader.",
                        SequenceFileAsTextRecordReader.class, readerClass);
                Text value = reader.createValue();
                Text key = reader.createKey();
                try {
                    int count = 0;
                    while (reader.next(key, value)) {
                        // if (bits.get(key.get())) {
                        // LOG.info("splits["+j+"]="+splits[j]+" : " + key.get());
                        // LOG.info("@"+reader.getPos());
                        // }
                        int keyInt = Integer.parseInt(key.toString());
                        assertFalse("Key in multiple partitions.", bits.get(keyInt));
                        bits.set(keyInt);
                        count++;
                    }
                    //LOG.info("splits["+j+"]="+splits[j]+" count=" + count);
                } finally {
                    reader.close();
                }
            }
            assertEquals("Some keys in no partition.", length, bits.cardinality());
        }

    }
}
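
The test above relies on a common BitSet idiom: mark every key that is read with set, fail if a key's bit is already set (it was read twice), and finally use cardinality() to confirm that every key was read at least once. A stripped-down, hypothetical sketch of just that idiom, with made-up split contents:

import java.util.BitSet;

public class ExactlyOnceCheck {
    public static void main(String[] args) {
        int length = 5;
        int[][] splits = { { 0, 1 }, { 2 }, { 3, 4 } }; // keys produced by each split (made-up data)

        BitSet seen = new BitSet(length);
        for (int[] split : splits) {
            for (int key : split) {
                if (seen.get(key)) {
                    throw new AssertionError("Key " + key + " appeared in more than one split");
                }
                seen.set(key); // record that this key has been read
            }
        }
        if (seen.cardinality() != length) {
            throw new AssertionError("Some keys were never read");
        }
        System.out.println("All " + length + " keys were read exactly once");
    }
}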

From source file:juicebox.data.MatrixZoomData.java

/**
 * Computes eigenvector from Pearson's.
 *
 * @param df    Expected values, needed to get Pearson's
 * @param which Which eigenvector; 0 is principal.
 * @return Eigenvector
 */
public double[] computeEigenvector(ExpectedValueFunction df, int which) {
    BasicMatrix pearsons = getPearsons(df);
    if (pearsons == null) {
        return null;
    }

    int dim = pearsons.getRowDimension();
    double[][] data = new double[dim][dim];
    BitSet bitSet = new BitSet(dim);
    for (int i = 0; i < dim; i++) {
        for (int j = 0; j < dim; j++) {
            float tmp = pearsons.getEntry(i, j);
            data[i][j] = tmp;
            if (data[i][j] != 0 && !Float.isNaN(tmp)) {
                bitSet.set(i);
            }
        }
    }

    int[] nonCentromereColumns = new int[bitSet.cardinality()];
    int count = 0;
    for (int i = 0; i < dim; i++) {
        if (bitSet.get(i))
            nonCentromereColumns[count++] = i;
    }

    RealMatrix subMatrix = new Array2DRowRealMatrix(data).getSubMatrix(nonCentromereColumns,
            nonCentromereColumns);
    RealVector rv = (new EigenDecompositionImpl(subMatrix, 0)).getEigenvector(which);

    double[] ev = rv.toArray();

    int size = pearsons.getColumnDimension();
    double[] eigenvector = new double[size];
    int num = 0;
    for (int i = 0; i < size; i++) {
        if (num < nonCentromereColumns.length && i == nonCentromereColumns[num]) {
            eigenvector[i] = ev[num];
            num++;
        } else {
            eigenvector[i] = Double.NaN;
        }
    }
    return eigenvector;

}

From source file:org.apache.hadoop.mapred.TestTextInputFormat.java

public void testFormat() throws Exception {
    JobConf job = new JobConf();
    Path file = new Path(workDir, "test.txt");

    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;

    int seed = new Random().nextInt();
    LOG.info("seed = " + seed);
    Random random = new Random(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    // for a variety of lengths
    for (int length = 0; length < MAX_LENGTH; length += random.nextInt(MAX_LENGTH / 10) + 1) {

        LOG.debug("creating; entries = " + length);

        // create a file with length entries
        Writer writer = new OutputStreamWriter(localFs.create(file));
        try {
            for (int i = 0; i < length; i++) {
                writer.write(Integer.toString(i));
                writer.write("\n");
            }
        } finally {
            writer.close();
        }

        // try splitting the file in a variety of sizes
        TextInputFormat format = new TextInputFormat();
        format.configure(job);
        LongWritable key = new LongWritable();
        Text value = new Text();
        for (int i = 0; i < 3; i++) {
            int numSplits = random.nextInt(MAX_LENGTH / 20) + 1;
            LOG.debug("splitting: requesting = " + numSplits);
            InputSplit[] splits = format.getSplits(job, numSplits);
            LOG.debug("splitting: got =        " + splits.length);

            if (length == 0) {
                assertEquals("Files of length 0 are not returned from FileInputFormat.getSplits().", 1,
                        splits.length);
                assertEquals("Empty file length == 0", 0, splits[0].getLength());
            }

            // check each split
            BitSet bits = new BitSet(length);
            for (int j = 0; j < splits.length; j++) {
                LOG.debug("split[" + j + "]= " + splits[j]);
                RecordReader<LongWritable, Text> reader = format.getRecordReader(splits[j], job, reporter);
                try {
                    int count = 0;
                    while (reader.next(key, value)) {
                        int v = Integer.parseInt(value.toString());
                        LOG.debug("read " + v);
                        if (bits.get(v)) {
                            LOG.warn("conflict with " + v + " in split " + j + " at position "
                                    + reader.getPos());
                        }
                        assertFalse("Key in multiple partitions.", bits.get(v));
                        bits.set(v);
                        count++;
                    }
                    LOG.debug("splits[" + j + "]=" + splits[j] + " count=" + count);
                } finally {
                    reader.close();
                }
            }
            assertEquals("Some keys in no partition.", length, bits.cardinality());
        }

    }
}

From source file:org.wso2.andes.kernel.router.TopicRoutingMatcher.java

/**
 * This method adds a constituent table with only the null and other constituents.
 * This is required when a message comes with more than the available number of constituents. If wildcard
 * queues are available for those, they should match. Hence the need to create these empty constituent tables.
 */
private void addEmptyConstituentTable() {
    int noOfqueues = storageQueueList.size();
    Map<String, BitSet> constituentTable = new HashMap<>();

    BitSet nullBitSet = new BitSet(noOfqueues);
    BitSet otherBitSet = new BitSet(noOfqueues);

    if (noOfqueues > 0) {

        // Null constituent will always be true for empty constituents, hence need to flip
        nullBitSet.flip(0, noOfqueues - 1);

        for (int queueIndex = 0; queueIndex < noOfqueues; queueIndex++) {
            // For 'other', if the subscriber's last constituent is a multi-level wildcard then it matches
            String[] allConstituent = queueConstituents.get(queueIndex);
            String lastConstituent = allConstituent[allConstituent.length - 1];

            if (multiLevelWildCard.equals(lastConstituent)) {
                otherBitSet.set(queueIndex);
            } else {
                otherBitSet.set(queueIndex, false);
            }
        }
    }

    constituentTable.put(NULL_CONSTITUENT, nullBitSet);
    constituentTable.put(OTHER_CONSTITUENT, otherBitSet);

    constituentTables.add(constituentTable);
}

From source file:org.wso2.andes.subscription.ClusterSubscriptionBitMapHandler.java

/**
 * @param constituentIndex The index to create the constituent for
 * @return The created constituent table
 */
private Map<String, BitSet> addConstituentTable(int constituentIndex) {
    Map<String, BitSet> constituentTable = new HashMap<String, BitSet>();

    BitSet nullBitSet = new BitSet(wildCardSubscriptionList.size());
    BitSet otherBitSet = new BitSet(wildCardSubscriptionList.size());

    // Fill null and other constituent values for all available subscriptions
    for (int subscriptionIndex = 0; subscriptionIndex < wildCardSubscriptionList.size(); subscriptionIndex++) {
        String[] constituentsOfSubscription = subscriptionConstituents.get(subscriptionIndex);

        if (constituentsOfSubscription.length < constituentIndex + 1) {
            // There is no constituent in this subscription for this constituent index
            nullBitSet.set(subscriptionIndex);

            // If last constituent of the subscription is multiLevelWildCard, then any other is a match
            if (multiLevelWildCard.equals(constituentsOfSubscription[constituentsOfSubscription.length - 1])) {
                otherBitSet.set(subscriptionIndex);
            }
        } else {
            String subscriptionConstituent = constituentsOfSubscription[constituentIndex];

            // Check if this is a wildcard
            if (multiLevelWildCard.equals(subscriptionConstituent)
                    || singleLevelWildCard.equals(subscriptionConstituent)) {
                otherBitSet.set(subscriptionIndex);
            }
        }
    }

    // Add 'null' and 'other' constituent
    constituentTable.put(NULL_CONSTITUENT, nullBitSet);
    constituentTable.put(OTHER_CONSTITUENT, otherBitSet);

    constituentTables.add(constituentIndex, constituentTable);

    return constituentTable;
}

From source file:org.apache.drill.exec.planner.logical.partition.PruneScanRule.java

protected void doOnMatch(RelOptRuleCall call, Filter filterRel, Project projectRel, TableScan scanRel) {

    final String pruningClassName = getClass().getName();
    logger.info("Beginning partition pruning, pruning class: {}", pruningClassName);
    Stopwatch totalPruningTime = Stopwatch.createStarted();

    final PlannerSettings settings = PrelUtil.getPlannerSettings(call.getPlanner());
    PartitionDescriptor descriptor = getPartitionDescriptor(settings, scanRel);
    final BufferAllocator allocator = optimizerContext.getAllocator();

    final Object selection = getDrillTable(scanRel).getSelection();
    MetadataContext metaContext = null;
    if (selection instanceof FormatSelection) {
        metaContext = ((FormatSelection) selection).getSelection().getMetaContext();
    }

    RexNode condition = null;
    if (projectRel == null) {
        condition = filterRel.getCondition();
    } else {
        // get the filter as if it were below the projection.
        condition = RelOptUtil.pushFilterPastProject(filterRel.getCondition(), projectRel);
    }

    RewriteAsBinaryOperators visitor = new RewriteAsBinaryOperators(true,
            filterRel.getCluster().getRexBuilder());
    condition = condition.accept(visitor);

    Map<Integer, String> fieldNameMap = Maps.newHashMap();
    List<String> fieldNames = scanRel.getRowType().getFieldNames();
    BitSet columnBitset = new BitSet();
    BitSet partitionColumnBitSet = new BitSet();
    Map<Integer, Integer> partitionMap = Maps.newHashMap();

    int relColIndex = 0;
    for (String field : fieldNames) {
        final Integer partitionIndex = descriptor.getIdIfValid(field);
        if (partitionIndex != null) {
            fieldNameMap.put(partitionIndex, field);
            partitionColumnBitSet.set(partitionIndex);
            columnBitset.set(relColIndex);
            // mapping between the relColIndex and partitionIndex
            partitionMap.put(relColIndex, partitionIndex);
        }
        relColIndex++;
    }

    if (partitionColumnBitSet.isEmpty()) {
        logger.info("No partition columns are projected from the scan..continue. "
                + "Total pruning elapsed time: {} ms", totalPruningTime.elapsed(TimeUnit.MILLISECONDS));
        setPruneStatus(metaContext, PruneStatus.NOT_PRUNED);
        return;
    }

    // stop watch to track how long we spend in different phases of pruning
    Stopwatch miscTimer = Stopwatch.createUnstarted();

    // track how long we spend building the filter tree
    miscTimer.start();

    FindPartitionConditions c = new FindPartitionConditions(columnBitset,
            filterRel.getCluster().getRexBuilder());
    c.analyze(condition);
    RexNode pruneCondition = c.getFinalCondition();
    BitSet referencedDirsBitSet = c.getReferencedDirs();

    logger.info("Total elapsed time to build and analyze filter tree: {} ms",
            miscTimer.elapsed(TimeUnit.MILLISECONDS));
    miscTimer.reset();

    if (pruneCondition == null) {
        logger.info("No conditions were found eligible for partition pruning."
                + "Total pruning elapsed time: {} ms", totalPruningTime.elapsed(TimeUnit.MILLISECONDS));
        setPruneStatus(metaContext, PruneStatus.NOT_PRUNED);
        return;
    }

    // set up the partitions
    List<PartitionLocation> newPartitions = Lists.newArrayList();
    long numTotal = 0; // total number of partitions
    int batchIndex = 0;
    PartitionLocation firstLocation = null;
    LogicalExpression materializedExpr = null;
    String[] spInfo = null;
    int maxIndex = -1;
    BitSet matchBitSet = new BitSet();

    // Outer loop: iterate over a list of batches of PartitionLocations
    for (List<PartitionLocation> partitions : descriptor) {
        numTotal += partitions.size();
        logger.debug("Evaluating partition pruning for batch {}", batchIndex);
        if (batchIndex == 0) { // save the first location in case everything is pruned
            firstLocation = partitions.get(0);
        }
        final NullableBitVector output = new NullableBitVector(
                MaterializedField.create("", Types.optional(MinorType.BIT)), allocator);
        final VectorContainer container = new VectorContainer();

        try {
            final ValueVector[] vectors = new ValueVector[descriptor.getMaxHierarchyLevel()];
            for (int partitionColumnIndex : BitSets.toIter(partitionColumnBitSet)) {
                SchemaPath column = SchemaPath.getSimplePath(fieldNameMap.get(partitionColumnIndex));
                MajorType type = descriptor.getVectorType(column, settings);
                MaterializedField field = MaterializedField.create(column.getAsUnescapedPath(), type);
                ValueVector v = TypeHelper.getNewVector(field, allocator);
                v.allocateNew();
                vectors[partitionColumnIndex] = v;
                container.add(v);
            }

            // track how long we spend populating partition column vectors
            miscTimer.start();

            // populate partition vectors.
            descriptor.populatePartitionVectors(vectors, partitions, partitionColumnBitSet, fieldNameMap);

            logger.info("Elapsed time to populate partitioning column vectors: {} ms within batchIndex: {}",
                    miscTimer.elapsed(TimeUnit.MILLISECONDS), batchIndex);
            miscTimer.reset();

            // materialize the expression; only need to do this once
            if (batchIndex == 0) {
                materializedExpr = materializePruneExpr(pruneCondition, settings, scanRel, container);
                if (materializedExpr == null) {
                    // continue without partition pruning; no need to log anything here since
                    // materializePruneExpr logs it already
                    logger.info("Total pruning elapsed time: {} ms",
                            totalPruningTime.elapsed(TimeUnit.MILLISECONDS));
                    setPruneStatus(metaContext, PruneStatus.NOT_PRUNED);
                    return;
                }
            }

            output.allocateNew(partitions.size());

            // start the timer to evaluate how long we spend in the interpreter evaluation
            miscTimer.start();

            InterpreterEvaluator.evaluate(partitions.size(), optimizerContext, container, output,
                    materializedExpr);

            logger.info(
                    "Elapsed time in interpreter evaluation: {} ms within batchIndex: {} with # of partitions : {}",
                    miscTimer.elapsed(TimeUnit.MILLISECONDS), batchIndex, partitions.size());
            miscTimer.reset();

            int recordCount = 0;
            int qualifiedCount = 0;

            if (descriptor.supportsMetadataCachePruning() && partitions.get(0)
                    .isCompositePartition() /* apply single partition check only for composite partitions */) {
                // Inner loop: within each batch iterate over the PartitionLocations
                for (PartitionLocation part : partitions) {
                    assert part.isCompositePartition();
                    if (!output.getAccessor().isNull(recordCount)
                            && output.getAccessor().get(recordCount) == 1) {
                        newPartitions.add(part);
                        // Rather than using the PartitionLocation, get the array of partition values for the directories that are
                        // referenced by the filter since we are not interested in directory references in other parts of the query.
                        Pair<String[], Integer> p = composePartition(referencedDirsBitSet, partitionMap,
                                vectors, recordCount);
                        String[] parts = p.getLeft();
                        int tmpIndex = p.getRight();
                        maxIndex = Math.max(maxIndex, tmpIndex);
                        if (spInfo == null) { // initialization
                            spInfo = parts;
                            for (int j = 0; j <= tmpIndex; j++) {
                                if (parts[j] != null) {
                                    matchBitSet.set(j);
                                }
                            }
                        } else {
                            // compare the new partition with existing partition
                            for (int j = 0; j <= tmpIndex; j++) {
                                if (parts[j] == null || spInfo[j] == null) { // nulls don't match
                                    matchBitSet.clear(j);
                                } else {
                                    if (!parts[j].equals(spInfo[j])) {
                                        matchBitSet.clear(j);
                                    }
                                }
                            }
                        }
                        qualifiedCount++;
                    }
                    recordCount++;
                }
            } else {
                // Inner loop: within each batch iterate over the PartitionLocations
                for (PartitionLocation part : partitions) {
                    if (!output.getAccessor().isNull(recordCount)
                            && output.getAccessor().get(recordCount) == 1) {
                        newPartitions.add(part);
                        qualifiedCount++;
                    }
                    recordCount++;
                }
            }
            logger.debug("Within batch {}: total records: {}, qualified records: {}", batchIndex, recordCount,
                    qualifiedCount);
            batchIndex++;
        } catch (Exception e) {
            logger.warn("Exception while trying to prune partition.", e);
            logger.info("Total pruning elapsed time: {} ms", totalPruningTime.elapsed(TimeUnit.MILLISECONDS));

            setPruneStatus(metaContext, PruneStatus.NOT_PRUNED);
            return; // continue without partition pruning
        } finally {
            container.clear();
            if (output != null) {
                output.clear();
            }
        }
    }

    try {
        if (newPartitions.size() == numTotal) {
            logger.info("No partitions were eligible for pruning");
            return;
        }

        // handle the case all partitions are filtered out.
        boolean canDropFilter = true;
        boolean wasAllPartitionsPruned = false;
        String cacheFileRoot = null;

        if (newPartitions.isEmpty()) {
            assert firstLocation != null;
            // Add the first non-composite partition location, since execution requires schema.
            // In such case, we should not drop filter.
            newPartitions.add(firstLocation.getPartitionLocationRecursive().get(0));
            canDropFilter = false;
            // NOTE: with DRILL-4530, the PruneScanRule may be called with only a list of
            // directories first and the non-composite partition location will still return
            // directories, not files.  So, additional processing is done depending on this flag
            wasAllPartitionsPruned = true;
            logger.info(
                    "All {} partitions were pruned; added back a single partition to allow creating a schema",
                    numTotal);

            // set the cacheFileRoot appropriately
            if (firstLocation.isCompositePartition()) {
                cacheFileRoot = descriptor.getBaseTableLocation() + firstLocation.getCompositePartitionPath();
            }
        }

        logger.info("Pruned {} partitions down to {}", numTotal, newPartitions.size());

        List<RexNode> conjuncts = RelOptUtil.conjunctions(condition);
        List<RexNode> pruneConjuncts = RelOptUtil.conjunctions(pruneCondition);
        conjuncts.removeAll(pruneConjuncts);
        RexNode newCondition = RexUtil.composeConjunction(filterRel.getCluster().getRexBuilder(), conjuncts,
                false);

        RewriteCombineBinaryOperators reverseVisitor = new RewriteCombineBinaryOperators(true,
                filterRel.getCluster().getRexBuilder());

        condition = condition.accept(reverseVisitor);
        pruneCondition = pruneCondition.accept(reverseVisitor);

        if (descriptor.supportsMetadataCachePruning() && !wasAllPartitionsPruned) {
            // if metadata cache file could potentially be used, then assign a proper cacheFileRoot
            int index = -1;
            if (!matchBitSet.isEmpty()) {
                String path = "";
                index = matchBitSet.length() - 1;

                for (int j = 0; j < matchBitSet.length(); j++) {
                    if (!matchBitSet.get(j)) {
                        // stop at the first index with no match and use the immediate
                        // previous index
                        index = j - 1;
                        break;
                    }
                }
                for (int j = 0; j <= index; j++) {
                    path += "/" + spInfo[j];
                }
                cacheFileRoot = descriptor.getBaseTableLocation() + path;
            }
            if (index != maxIndex) {
                // if multiple partitions are being selected, we should not drop the filter
                // since we are reading the cache file at a parent/ancestor level
                canDropFilter = false;
            }

        }

        RelNode inputRel = descriptor.supportsMetadataCachePruning()
                ? descriptor.createTableScan(newPartitions, cacheFileRoot, wasAllPartitionsPruned, metaContext)
                : descriptor.createTableScan(newPartitions, wasAllPartitionsPruned);

        if (projectRel != null) {
            inputRel = projectRel.copy(projectRel.getTraitSet(), Collections.singletonList(inputRel));
        }

        if (newCondition.isAlwaysTrue() && canDropFilter) {
            call.transformTo(inputRel);
        } else {
            final RelNode newFilter = filterRel.copy(filterRel.getTraitSet(),
                    Collections.singletonList(inputRel));
            call.transformTo(newFilter);
        }

        setPruneStatus(metaContext, PruneStatus.PRUNED);

    } catch (Exception e) {
        logger.warn("Exception while using the pruned partitions.", e);
    } finally {
        logger.info("Total pruning elapsed time: {} ms", totalPruningTime.elapsed(TimeUnit.MILLISECONDS));
    }
}

From source file:org.apache.hadoop.mapreduce.lib.input.TestKeyValueTextInputFormat.java

public void testFormat() throws Exception {
    Job job = new Job(defaultConf);
    Path file = new Path(workDir, "test.txt");

    int seed = new Random().nextInt();
    LOG.info("seed = " + seed);
    Random random = new Random(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    // for a variety of lengths
    for (int length = 0; length < MAX_LENGTH; length += random.nextInt(MAX_LENGTH / 10) + 1) {

        LOG.debug("creating; entries = " + length);

        // create a file with length entries
        Writer writer = new OutputStreamWriter(localFs.create(file));
        try {
            for (int i = 0; i < length; i++) {
                writer.write(Integer.toString(i * 2));
                writer.write("\t");
                writer.write(Integer.toString(i));
                writer.write("\n");
            }
        } finally {
            writer.close();
        }

        KeyValueTextInputFormat format = new KeyValueTextInputFormat();
        JobContext jobContext = new JobContext(job.getConfiguration(), new JobID());
        List<InputSplit> splits = format.getSplits(jobContext);
        LOG.debug("splitting: got =        " + splits.size());

        TaskAttemptContext context = new TaskAttemptContext(job.getConfiguration(), new TaskAttemptID());

        // check each split
        BitSet bits = new BitSet(length);
        for (InputSplit split : splits) {
            LOG.debug("split= " + split);
            RecordReader<Text, Text> reader = format.createRecordReader(split, context);
            Class readerClass = reader.getClass();
            assertEquals("reader class is KeyValueLineRecordReader.", KeyValueLineRecordReader.class,
                    readerClass);

            reader.initialize(split, context);
            try {
                int count = 0;
                while (reader.nextKeyValue()) {
                    int v = Integer.parseInt(reader.getCurrentValue().toString());
                    LOG.debug("read " + v);
                    if (bits.get(v)) {
                        LOG.warn("conflict with " + v + " in split " + split + " at " + reader.getProgress());
                    }
                    assertFalse("Key in multiple partitions.", bits.get(v));
                    bits.set(v);
                    count++;
                }
                LOG.debug("split=" + split + " count=" + count);
            } finally {
                reader.close();
            }
        }
        assertEquals("Some keys in no partition.", length, bits.cardinality());

    }
}

From source file:org.wso2.andes.kernel.router.TopicRoutingMatcher.java

/**
 * Add a new constituent row for the given constituent index table and fill values for already available
 * queues.
 *
 * @param constituent      The constituent to add
 * @param constituentIndex The index of the constituent
 */
private void addConstituentRow(String constituent, int constituentIndex) {
    Map<String, BitSet> constituentTable = constituentTables.get(constituentIndex);
    BitSet bitSet = new BitSet();

    for (int i = 0; i < queueConstituents.size(); i++) {
        String[] constituentsOfQueue = queueConstituents.get(i);

        if (constituentIndex < constituentsOfQueue.length) {
            // Get the i'th queue's [constituentIndex]'th constituent
            String queueConstituent = constituentsOfQueue[constituentIndex];
            if (queueConstituent.equals(constituent) || multiLevelWildCard.equals(queueConstituent)
                    || singleLevelWildCard.equals(queueConstituent)) {
                // The new constituent matches the queue's i'th constituent
                bitSet.set(i);
            } else {
                // The new constituent does not match the i'th queue's [constituentIndex] constituent
                bitSet.set(i, false);
            }
        } else {
            // The queue does not have a constituent for this index
            // If the last constituent of the queue is multiLevelWildCard it is a match; otherwise it is not
            if (multiLevelWildCard.equals(constituentsOfQueue[constituentsOfQueue.length - 1])) {
                bitSet.set(i);
            } else {
                bitSet.set(i, false);
            }
        }
    }

    constituentTable.put(constituent, bitSet);
}