Example usage for java.util BitSet cardinality

Introduction

On this page you can find example usages of java.util.BitSet.cardinality(), collected from open-source projects.

Prototype

public int cardinality() 

Document

Returns the number of bits set to true in this BitSet.
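
A minimal, self-contained sketch of the method in action (the class and variable names here are illustrative only, not taken from the projects below):

import java.util.BitSet;

public class CardinalityDemo {
    public static void main(String[] args) {
        BitSet bits = new BitSet(16); // the constructor argument is only a sizing hint; a BitSet grows as needed
        bits.set(1);
        bits.set(4);
        bits.set(4); // setting an already-set bit leaves the count unchanged
        bits.set(9);
        System.out.println(bits.cardinality()); // prints 3
        bits.clear(4);
        System.out.println(bits.cardinality()); // prints 2
    }
}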

Usage

From source file:org.apache.hadoop.mapreduce.lib.input.TestMRKeyValueTextInputFormat.java

@Test
public void testFormat() throws Exception {
    Job job = Job.getInstance(new Configuration(defaultConf));
    Path file = new Path(workDir, "test.txt");

    int seed = new Random().nextInt();
    LOG.info("seed = " + seed);
    Random random = new Random(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int MAX_LENGTH = 10000;
    // for a variety of lengths
    for (int length = 0; length < MAX_LENGTH; length += random.nextInt(MAX_LENGTH / 10) + 1) {

        LOG.debug("creating; entries = " + length);

        // create a file with length entries
        Writer writer = new OutputStreamWriter(localFs.create(file));
        try {
            for (int i = 0; i < length; i++) {
                writer.write(Integer.toString(i * 2));
                writer.write("\t");
                writer.write(Integer.toString(i));
                writer.write("\n");
            }
        } finally {
            writer.close();
        }

        // try splitting the file in a variety of sizes
        KeyValueTextInputFormat format = new KeyValueTextInputFormat();
        for (int i = 0; i < 3; i++) {
            int numSplits = random.nextInt(MAX_LENGTH / 20) + 1;
            LOG.debug("splitting: requesting = " + numSplits);
            List<InputSplit> splits = format.getSplits(job);
            LOG.debug("splitting: got =        " + splits.size());

            // check each split
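            // one bit per expected value: a bit set twice means a record appeared in two splits;
            // a bit never set means it appeared in none (caught by the cardinality check below)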
            BitSet bits = new BitSet(length);
            for (int j = 0; j < splits.size(); j++) {
                LOG.debug("split[" + j + "]= " + splits.get(j));
                TaskAttemptContext context = MapReduceTestUtil
                        .createDummyMapTaskAttemptContext(job.getConfiguration());
                RecordReader<Text, Text> reader = format.createRecordReader(splits.get(j), context);
                Class<?> clazz = reader.getClass();
                assertEquals("reader class is KeyValueLineRecordReader.", KeyValueLineRecordReader.class,
                        clazz);
                MapContext<Text, Text, Text, Text> mcontext = new MapContextImpl<Text, Text, Text, Text>(
                        job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
                        MapReduceTestUtil.createDummyReporter(), splits.get(j));
                reader.initialize(splits.get(j), mcontext);

                Text key = null;
                Text value = null;
                try {
                    int count = 0;
                    while (reader.nextKeyValue()) {
                        key = reader.getCurrentKey();
                        clazz = key.getClass();
                        assertEquals("Key class is Text.", Text.class, clazz);
                        value = reader.getCurrentValue();
                        clazz = value.getClass();
                        assertEquals("Value class is Text.", Text.class, clazz);
                        final int k = Integer.parseInt(key.toString());
                        final int v = Integer.parseInt(value.toString());
                        assertEquals("Bad key", 0, k % 2);
                        assertEquals("Mismatched key/value", k / 2, v);
                        LOG.debug("read " + v);
                        assertFalse("Key in multiple partitions.", bits.get(v));
                        bits.set(v);
                        count++;
                    }
                    LOG.debug("splits[" + j + "]=" + splits.get(j) + " count=" + count);
                } finally {
                    reader.close();
                }
            }
            assertEquals("Some keys in no partition.", length, bits.cardinality());
        }

    }
}

From source file:org.springframework.kafka.listener.KafkaMessageListenerContainerTests.java

@Test
public void testSlowListener() throws Exception {
    logger.info("Start " + this.testName.getMethodName());
    Map<String, Object> props = KafkaTestUtils.consumerProps("slow1", "false", embeddedKafka);
    //      props.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 6); // 2 per poll
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<Integer, String>(props);
    ContainerProperties containerProps = new ContainerProperties(topic1);

    final CountDownLatch latch = new CountDownLatch(6);
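    // one bit per expected record: with three records in each of two partitions,
    // partition * 3 + offset yields a unique index in 0..5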
    final BitSet bitSet = new BitSet(6);
    containerProps.setMessageListener((MessageListener<Integer, String>) message -> {
        logger.info("slow1: " + message);
        bitSet.set((int) (message.partition() * 3 + message.offset()));
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        latch.countDown();
    });
    containerProps.setPauseAfter(100);
    KafkaMessageListenerContainer<Integer, String> container = new KafkaMessageListenerContainer<>(cf,
            containerProps);
    container.setBeanName("testSlow1");

    container.start();
    Consumer<?, ?> consumer = spyOnConsumer(container);
    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());

    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<Integer, String>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic1);
    template.sendDefault(0, "foo");
    template.sendDefault(2, "bar");
    template.sendDefault(0, "baz");
    template.sendDefault(2, "qux");
    template.flush();
    Thread.sleep(300);
    template.sendDefault(0, "fiz");
    template.sendDefault(2, "buz");
    template.flush();
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(bitSet.cardinality()).isEqualTo(6);
    verify(consumer, atLeastOnce()).pause(anyObject());
    verify(consumer, atLeastOnce()).resume(anyObject());
    container.stop();
    logger.info("Stop " + this.testName.getMethodName());
}

From source file:org.springframework.kafka.listener.KafkaMessageListenerContainerTests.java

private void testSlowListenerManualGuts(AckMode ackMode, String topic) throws Exception {
    logger.info("Start " + this.testName.getMethodName() + ackMode);
    Map<String, Object> props = KafkaTestUtils.consumerProps("slow2", "false", embeddedKafka);
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<Integer, String>(props);
    ContainerProperties containerProps = new ContainerProperties(topic);
    containerProps.setSyncCommits(true);

    final CountDownLatch latch = new CountDownLatch(6);
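    // the initial size (4) is only a sizing hint; a BitSet grows on demand, so indexes up to 5 are still fine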
    final BitSet bitSet = new BitSet(4);
    containerProps.setMessageListener((AcknowledgingMessageListener<Integer, String>) (message, ack) -> {
        logger.info("slow2: " + message);
        bitSet.set((int) (message.partition() * 3 + message.offset()));
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        ack.acknowledge();
        latch.countDown();
    });
    containerProps.setPauseAfter(100);
    containerProps.setAckMode(ackMode);

    KafkaMessageListenerContainer<Integer, String> container = new KafkaMessageListenerContainer<>(cf,
            containerProps);
    container.setBeanName("testSlow2");
    container.start();

    Consumer<?, ?> consumer = spyOnConsumer(container);

    final CountDownLatch commitLatch = new CountDownLatch(7);

    willAnswer(invocation -> {

        try {
            return invocation.callRealMethod();
        } finally {
            commitLatch.countDown();
        }

    }).given(consumer).commitSync(any());

    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());

    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<Integer, String>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic);
    template.sendDefault(0, "foo");
    template.sendDefault(2, "bar");
    template.sendDefault(0, "baz");
    template.sendDefault(2, "qux");
    template.flush();
    Thread.sleep(300);
    template.sendDefault(0, "fiz");
    template.sendDefault(2, "buz");
    template.flush();
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(commitLatch.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(bitSet.cardinality()).isEqualTo(6);
    verify(consumer, atLeastOnce()).pause(anyObject());
    verify(consumer, atLeastOnce()).resume(anyObject());
    container.stop();
    logger.info("Stop " + this.testName.getMethodName() + ackMode);
}

From source file:org.roaringbitmap.TestRoaringBitmap.java

public void rTest(final int N) {
    System.out.println("rtest N=" + N);
    for (int gap = 1; gap <= 65536; gap *= 2) {
        final BitSet bs1 = new BitSet();
        final RoaringBitmap rb1 = new RoaringBitmap();
        for (int x = 0; x <= N; x += gap) {
            bs1.set(x);
            rb1.add(x);
        }
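        // the BitSet serves as the reference implementation for the RoaringBitmap under test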
        if (bs1.cardinality() != rb1.getCardinality())
            throw new RuntimeException("different card");
        if (!equals(bs1, rb1))
            throw new RuntimeException("basic  bug");
        for (int offset = 1; offset <= gap; offset *= 2) {
            final BitSet bs2 = new BitSet();
            final RoaringBitmap rb2 = new RoaringBitmap();
            for (int x = 0; x <= N; x += gap) {
                bs2.set(x + offset);
                rb2.add(x + offset);
            }
            if (bs2.cardinality() != rb2.getCardinality())
                throw new RuntimeException("different card");
            if (!equals(bs2, rb2))
                throw new RuntimeException("basic  bug");

            BitSet clonebs1;
            // testing AND
            clonebs1 = (BitSet) bs1.clone();
            clonebs1.and(bs2);
            if (!equals(clonebs1, RoaringBitmap.and(rb1, rb2)))
                throw new RuntimeException("bug and");
            {
                final RoaringBitmap t = rb1.clone();
                t.and(rb2);
                if (!equals(clonebs1, t))
                    throw new RuntimeException("bug inplace and");
                if (!t.equals(RoaringBitmap.and(rb1, rb2))) {
                    System.out.println(t.highLowContainer.getContainerAtIndex(0).getClass().getCanonicalName());
                    System.out.println(RoaringBitmap.and(rb1, rb2).highLowContainer.getContainerAtIndex(0)
                            .getClass().getCanonicalName());

                    throw new RuntimeException("bug inplace and");
                }
            }

            // testing OR
            clonebs1 = (BitSet) bs1.clone();
            clonebs1.or(bs2);

            if (!equals(clonebs1, RoaringBitmap.or(rb1, rb2)))
                throw new RuntimeException("bug or");
            {
                final RoaringBitmap t = rb1.clone();
                t.or(rb2);
                if (!equals(clonebs1, t))
                    throw new RuntimeException("bug or");
                if (!t.equals(RoaringBitmap.or(rb1, rb2)))
                    throw new RuntimeException("bug or");
                if (!t.toString().equals(RoaringBitmap.or(rb1, rb2).toString()))
                    throw new RuntimeException("bug or");

            }
            // testing XOR
            clonebs1 = (BitSet) bs1.clone();
            clonebs1.xor(bs2);
            if (!equals(clonebs1, RoaringBitmap.xor(rb1, rb2))) {
                throw new RuntimeException("bug xor");
            }
            {
                final RoaringBitmap t = rb1.clone();
                t.xor(rb2);
                if (!equals(clonebs1, t))
                    throw new RuntimeException("bug xor");
                if (!t.equals(RoaringBitmap.xor(rb1, rb2)))
                    throw new RuntimeException("bug xor");
            }
            // testing NOTAND
            clonebs1 = (BitSet) bs1.clone();
            clonebs1.andNot(bs2);
            if (!equals(clonebs1, RoaringBitmap.andNot(rb1, rb2))) {
                throw new RuntimeException("bug andnot");
            }
            clonebs1 = (BitSet) bs2.clone();
            clonebs1.andNot(bs1);
            if (!equals(clonebs1, RoaringBitmap.andNot(rb2, rb1))) {
                throw new RuntimeException("bug andnot");
            }
            {
                final RoaringBitmap t = rb2.clone();
                t.andNot(rb1);
                if (!equals(clonebs1, t)) {
                    throw new RuntimeException("bug inplace andnot");
                }
                final RoaringBitmap g = RoaringBitmap.andNot(rb2, rb1);
                if (!equals(clonebs1, g)) {
                    throw new RuntimeException("bug andnot");
                }
                if (!t.equals(g))
                    throw new RuntimeException("bug");
            }
            clonebs1 = (BitSet) bs1.clone();
            clonebs1.andNot(bs2);
            if (!equals(clonebs1, RoaringBitmap.andNot(rb1, rb2))) {
                throw new RuntimeException("bug andnot");
            }
            {
                final RoaringBitmap t = rb1.clone();
                t.andNot(rb2);
                if (!equals(clonebs1, t)) {
                    throw new RuntimeException("bug andnot");
                }
                final RoaringBitmap g = RoaringBitmap.andNot(rb1, rb2);
                if (!equals(clonebs1, g)) {
                    throw new RuntimeException("bug andnot");
                }
                if (!t.equals(g))
                    throw new RuntimeException("bug");
            }
        }
    }
}

From source file:org.springframework.kafka.listener.KafkaMessageListenerContainerTests.java

@Test
public void testSlowConsumerWithException() throws Exception {
    logger.info("Start " + this.testName.getMethodName());
    Map<String, Object> props = KafkaTestUtils.consumerProps("slow3", "false", embeddedKafka);
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<Integer, String>(props);
    ContainerProperties containerProps = new ContainerProperties(topic3);

    final CountDownLatch latch = new CountDownLatch(18);
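    // retries re-set the same bit, so cardinality() still ends at 6 even though the latch counts 18 deliveries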
    final BitSet bitSet = new BitSet(6);
    final Map<String, AtomicInteger> faults = new HashMap<>();
    RetryingMessageListenerAdapter<Integer, String> adapter = new RetryingMessageListenerAdapter<>(
            new MessageListener<Integer, String>() {

                @Override
                public void onMessage(ConsumerRecord<Integer, String> message) {
                    logger.info("slow3: " + message);
                    bitSet.set((int) (message.partition() * 3 + message.offset()));
                    String key = message.topic() + message.partition() + message.offset();
                    if (faults.get(key) == null) {
                        faults.put(key, new AtomicInteger(1));
                    } else {
                        faults.get(key).incrementAndGet();
                    }
                    latch.countDown(); // 3 attempts per record x 6 records = 18
                    if (faults.get(key).get() < 3) { // succeed on the third attempt
                        throw new FooEx();
                    }
                }

            }, buildRetry(), null);
    containerProps.setMessageListener(adapter);
    containerProps.setPauseAfter(100);

    KafkaMessageListenerContainer<Integer, String> container = new KafkaMessageListenerContainer<>(cf,
            containerProps);
    container.setBeanName("testSlow3");

    container.start();
    Consumer<?, ?> consumer = spyOnConsumer(container);
    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());

    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic3);
    template.sendDefault(0, "foo");
    template.sendDefault(2, "bar");
    template.sendDefault(0, "baz");
    template.sendDefault(2, "qux");
    template.flush();
    Thread.sleep(300);
    template.sendDefault(0, "fiz");
    template.sendDefault(2, "buz");
    template.flush();
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(bitSet.cardinality()).isEqualTo(6);
    verify(consumer, atLeastOnce()).pause(anyObject());
    verify(consumer, atLeastOnce()).resume(anyObject());
    container.stop();
    logger.info("Stop " + this.testName.getMethodName());
}

From source file:com.joliciel.jochre.graphics.SegmenterImpl.java

void splitShapes(SourceImage sourceImage, int fillFactor) {
    LOG.debug("########## splitShapes #########");
    // Cluster rows into rows of a similar height
    // Once we have this, we look for any shapes that are wider than average
    // and attempt to split them by looking for any bridges that are considerably thinner
    // than the stroke thickness and yet have big pixel counts on either side.

    // In order to split, we need four parameters
    // 1) minShapeWidth: the minimum shape width to consider for a split
    // 2) maxBridgeWidth: the maximum bridge width to use as a dividing bridge between two shapes when splitting
    // 3) minLetterWeight: the minimum pixel count that can represent a separate letter when splitting
    // 4) maxHorizontalOverlap: the maximum horizontal overlap between the left-hand and right-hand shape

    // These parameters are different for different font sizes
    // Therefore, we first need to group the rows on the image into clusters by height

    double imageShapeMean = sourceImage.getAverageShapeWidth();
    double maxWidthForSplit = imageShapeMean * 6.0; // avoid splitting horizontal rules!

    Set<Set<RowOfShapes>> rowClusters = sourceImage.getRowClusters();
    for (Set<RowOfShapes> rowCluster : rowClusters) {
        LOG.debug("Analysing row cluster");
        // 1) minShapeWidth: calculate the minimum shape width to be considered for splitting

        // first get the mean
        Mean meanWidth = new Mean();
        List<Shape> shapes = new ArrayList<Shape>();
        for (RowOfShapes row : rowCluster) {
            for (Shape shape : row.getShapes()) {
                meanWidth.increment(shape.getWidth());
                shapes.add(shape);
            }
        }
        double shapeWidthMean = meanWidth.getResult();
        LOG.debug("Mean width: " + shapeWidthMean);
        meanWidth.clear();

        // Note: there is much trial and error for these numbers
        // but the general guideline is that it is easier to deal downstream
        // with bad joins than with bad splits
        // so we prefer to err on the upper side
        double fillFactorScale = 0.15 * fillFactor;
        double widthForSplittingLower = shapeWidthMean * (1.6 + fillFactorScale);
        double widthForSplittingUpper = shapeWidthMean * (2.2 + fillFactorScale);

        LOG.debug("widthForSplittingLower: " + widthForSplittingLower);
        LOG.debug("widthForSplittingUpper: " + widthForSplittingUpper);
        LOG.debug("maxWidthForSplit: " + maxWidthForSplit);
        List<Shape> candidates = new ArrayList<Shape>();
        for (RowOfShapes row : rowCluster) {
            LOG.debug("Next row " + row.getIndex());
            for (Shape shape : row.getShapes()) {
                LOG.trace("Shape width " + shape.getWidth());
                if (shape.getWidth() > widthForSplittingLower && shape.getWidth() < maxWidthForSplit) {
                    candidates.add(shape);
                    LOG.debug("Found candidate with width " + shape.getWidth() + ": " + shape);
                }
            }
        }

        if (candidates.size() > 0) {
            // we'll take a random sampling of shapes for the next parameters
            int sampleSize = 30;
            List<Shape> sample = this.getSample(rowCluster, sampleSize, true);

            Mean meanPixelCount = new Mean();
            Vectorizer vectorizer = this.graphicsService.getVectorizer();
            List<Integer> thicknesses = new ArrayList<Integer>();
            for (Shape shape : sample) {
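                // cardinality() counts the set bits, i.e. the shape's black pixels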
                BitSet bitset = shape.getBlackAndWhiteBitSet(sourceImage.getSeparationThreshold(), 0);
                meanPixelCount.increment(bitset.cardinality());
                List<LineSegment> vectors = vectorizer.vectorize(shape);

                int height = shape.getHeight();
                int sampleStep = (int) Math.ceil(height / 8.0); // divide as doubles so the ceiling can round up

                for (LineSegment vector : vectors) {
                    List<Integer> vectorThickness = vector.getLineDefinition().findArrayListThickness(shape,
                            vector.getStartX(), vector.getStartY(), vector.getLength(),
                            sourceImage.getSeparationThreshold(), 0, sampleStep);
                    thicknesses.addAll(vectorThickness);
                }

            }

            double pixelCountMean = meanPixelCount.getResult();

            Mean meanThickness = new Mean();
            for (int thickness : thicknesses) {
                meanThickness.increment(thickness);
            }
            double thicknessMean = meanThickness.getResult();

            meanThickness = new Mean();
            for (int thickness : thicknesses) {
                if (thickness < thicknessMean)
                    meanThickness.increment(thickness);
            }

            thicknessMean = meanThickness.getResult();
            LOG.debug("thicknessMean: " + thicknessMean);

            // 2) maxBridgeWidth: the maximum bridge width to use as a dividing bridge between two shapes when splitting
            double maxBridgeWidthLower = thicknessMean * 0.5;
            double maxBridgeWidthUpper = thicknessMean * 0.8;
            LOG.debug("maxBridgeWidthLower: " + maxBridgeWidthLower);
            LOG.debug("maxBridgeWidthUpper: " + maxBridgeWidthUpper);

            // 3) minLetterWeight: the minimum pixel count that can represent a separate letter when splitting
            int minLetterWeight = (int) Math.floor(pixelCountMean / 4.0);
            LOG.debug("minLetterWeight: " + minLetterWeight);

            // 4) maxHorizontalOverlap: the maximum horizontal overlap between the left-hand and right-hand shape
            int maxOverlap = (int) Math.ceil(shapeWidthMean / 8.0);
            LOG.debug("maxOverlap: " + maxOverlap);

            Map<Shape, List<Shape>> shapesToSplit = new Hashtable<Shape, List<Shape>>();
            for (Shape candidate : candidates) {
                LOG.debug("Trying to split candidate " + candidate);
                for (int y = 0; y < candidate.getHeight(); y++) {
                    String line = "";
                    if (y == candidate.getMeanLine())
                        line += "M";
                    else if (y == candidate.getBaseLine())
                        line += "B";
                    else
                        line += y;
                    for (int x = 0; x < candidate.getWidth(); x++) {
                        if (candidate.isPixelBlack(x, y, sourceImage.getBlackThreshold()))
                            line += "x";
                        else
                            line += "o";
                    }
                    LOG.debug(line);
                }
                if (candidate.getHeight() < 3.0 * maxBridgeWidthUpper) {
                    LOG.debug("Shape too narrow - probably a long dash.");
                    continue;
                }
                int maxBridgeWidth;
                if (candidate.getWidth() > widthForSplittingUpper)
                    maxBridgeWidth = (int) Math.ceil(maxBridgeWidthUpper);
                else {
                    // since many bridges are thicker than expected
                    // add a rule that the thicker the bridge is, the wider the image needs to be
                    maxBridgeWidth = (int) Math.ceil(
                            maxBridgeWidthLower + (((double) candidate.getWidth() - widthForSplittingLower)
                                    / (widthForSplittingUpper - widthForSplittingLower)
                                    * (maxBridgeWidthUpper - maxBridgeWidthLower)));
                }
                List<Shape> splitShapes = this.splitShape(candidate, sourceImage, maxBridgeWidth,
                        minLetterWeight, maxOverlap);
                if (splitShapes.size() > 1) {
                    LOG.debug("Split found");
                    for (Shape splitShape : splitShapes) {
                        splitShape.setRow(candidate.getRow());
                    }
                    shapesToSplit.put(candidate, splitShapes);
                }
            }

            LOG.debug("Replacing shapes with split shapes");
            List<RowOfShapes> rowsToReorder = new ArrayList<RowOfShapes>();
            for (Shape shape : shapesToSplit.keySet()) {
                List<Shape> newShapes = shapesToSplit.get(shape);
                RowOfShapes row = shape.getRow();
                row.removeShape(shape);
                row.addShapes(newShapes);
                rowsToReorder.add(row);
            }

            for (RowOfShapes row : rowsToReorder)
                row.reorderShapes();
        }
    }
    LOG.debug("splitShapes complete");
}

From source file:org.springframework.kafka.listener.KafkaMessageListenerContainerTests.java

@Test
public void testSlowConsumerWithSlowThenExceptionThenGood() throws Exception {
    logger.info("Start " + this.testName.getMethodName());
    Map<String, Object> props = KafkaTestUtils.consumerProps("slow4", "false", embeddedKafka);
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<Integer, String>(props);
    ContainerProperties containerProps = new ContainerProperties(topic4);
    final CountDownLatch latch = new CountDownLatch(18);
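    // one bit per expected record (partition * 4 + offset here); retries hit the same bit, so cardinality() ends at 6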
    final BitSet bitSet = new BitSet(6);
    final Map<String, AtomicInteger> faults = new HashMap<>();
    RetryingMessageListenerAdapter<Integer, String> adapter = new RetryingMessageListenerAdapter<>(
            new MessageListener<Integer, String>() {

                @Override
                public void onMessage(ConsumerRecord<Integer, String> message) {
                    logger.info("slow4: " + message);
                    bitSet.set((int) (message.partition() * 4 + message.offset()));
                    String key = message.topic() + message.partition() + message.offset();
                    if (faults.get(key) == null) {
                        faults.put(key, new AtomicInteger(1));
                    } else {
                        faults.get(key).incrementAndGet();
                    }
                    latch.countDown(); // 3 attempts per record x 6 records = 18
                    if (faults.get(key).get() == 1) {
                        try {
                            Thread.sleep(1000);
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt();
                        }
                    }
                    if (faults.get(key).get() < 3) { // succeed on the third attempt
                        throw new FooEx();
                    }
                }

            }, buildRetry(), null);
    containerProps.setMessageListener(adapter);
    containerProps.setPauseAfter(100);

    KafkaMessageListenerContainer<Integer, String> container = new KafkaMessageListenerContainer<>(cf,
            containerProps);
    container.setBeanName("testSlow4");

    container.start();
    Consumer<?, ?> consumer = spyOnConsumer(container);
    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());

    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic4);
    template.sendDefault(0, "foo");
    template.sendDefault(2, "bar");
    template.sendDefault(0, "baz");
    template.sendDefault(2, "qux");
    template.flush();
    Thread.sleep(300);
    template.sendDefault(0, "fiz");
    template.sendDefault(2, "buz");
    template.flush();
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(bitSet.cardinality()).isEqualTo(6);
    verify(consumer, atLeastOnce()).pause(anyObject());
    verify(consumer, atLeastOnce()).resume(anyObject());
    container.stop();
    logger.info("Stop " + this.testName.getMethodName());
}

From source file:org.alfresco.module.org_alfresco_module_rm.capability.RMAfterInvocationProvider.java

private Object[] decide(Authentication authentication, Object object, ConfigAttributeDefinition config,
        Object[] returnedObject) {
    // Assumption: value is not null
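    // one bit per element of returnedObject; a set bit marks an element the caller may see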
    BitSet includedSet = new BitSet(returnedObject.length);

    List<ConfigAttributeDefintion> supportedDefinitions = extractSupportedDefinitions(config);

    if (supportedDefinitions.size() == 0) {
        return returnedObject;
    }

    for (int i = 0, l = returnedObject.length; i < l; i++) {
        Object current = returnedObject[i];

        int parentReadCheck = checkRead(getParentReadCheckNode(current));
        int childReadCheck = checkRead(getChildReadCheckNode(current));

        for (ConfigAttributeDefintion cad : supportedDefinitions) {
            includedSet.set(i, true);
            NodeRef testNodeRef = null;
            if (cad.parent) {
                if (StoreRef.class.isAssignableFrom(current.getClass())) {
                    testNodeRef = null;
                } else if (NodeRef.class.isAssignableFrom(current.getClass())) {
                    testNodeRef = nodeService.getPrimaryParent((NodeRef) current).getParentRef();
                } else if (ChildAssociationRef.class.isAssignableFrom(current.getClass())) {
                    testNodeRef = ((ChildAssociationRef) current).getParentRef();
                } else if (PermissionCheckValue.class.isAssignableFrom(current.getClass())) {
                    NodeRef nodeRef = ((PermissionCheckValue) current).getNodeRef();
                    testNodeRef = nodeService.getPrimaryParent(nodeRef).getParentRef();
                } else {
                    throw new ACLEntryVoterException(
                            "The specified parameter is not recognized: " + current.getClass());
                }
            } else {
                if (StoreRef.class.isAssignableFrom(current.getClass())) {
                    testNodeRef = nodeService.getRootNode((StoreRef) current);
                } else if (NodeRef.class.isAssignableFrom(current.getClass())) {
                    testNodeRef = (NodeRef) current;
                } else if (ChildAssociationRef.class.isAssignableFrom(current.getClass())) {
                    testNodeRef = ((ChildAssociationRef) current).getChildRef();
                } else if (PermissionCheckValue.class.isAssignableFrom(current.getClass())) {
                    testNodeRef = ((PermissionCheckValue) current).getNodeRef();
                } else {
                    throw new ACLEntryVoterException(
                            "The specified parameter is not recognized: " + current.getClass());
                }
            }

            if (logger.isDebugEnabled()) {
                logger.debug("\t" + cad.typeString + " test on " + testNodeRef + " from "
                        + current.getClass().getName());
            }

            if (isUnfiltered(testNodeRef)) {
                continue;
            }

            int readCheck = childReadCheck;
            if (cad.parent) {
                readCheck = parentReadCheck;
            }

            if (includedSet.get(i) && (testNodeRef != null)
                    && (readCheck != AccessDecisionVoter.ACCESS_GRANTED)) {
                includedSet.set(i, false);
            }

        }
    }

    if (includedSet.cardinality() == returnedObject.length) {
        return returnedObject;
    } else {
        Object[] answer = new Object[includedSet.cardinality()];
        for (int i = includedSet.nextSetBit(0), p = 0; i >= 0; i = includedSet.nextSetBit(++i), p++) {
            answer[p] = returnedObject[i];
        }
        return answer;
    }
}

From source file:org.apache.tez.runtime.library.common.writers.TestUnorderedPartitionedKVWriter.java

public void textTest(int numRegularRecords, int numPartitions, long availableMemory, int numLargeKeys,
        int numLargeValues, int numLargeKvPairs) throws IOException, InterruptedException {
    Partitioner partitioner = new HashPartitioner();
    ApplicationId appId = ApplicationId.newInstance(10000, 1);
    TezCounters counters = new TezCounters();
    String uniqueId = UUID.randomUUID().toString();
    OutputContext outputContext = createMockOutputContext(counters, appId, uniqueId);
    Random random = new Random();

    Configuration conf = createConfiguration(outputContext, Text.class, Text.class, shouldCompress, -1,
            HashPartitioner.class);
    CompressionCodec codec = null;
    if (shouldCompress) {
        codec = new DefaultCodec();
        ((Configurable) codec).setConf(conf);
    }

    int numRecordsWritten = 0;

    Map<Integer, Multimap<String, String>> expectedValues = new HashMap<Integer, Multimap<String, String>>();
    for (int i = 0; i < numPartitions; i++) {
        expectedValues.put(i, LinkedListMultimap.<String, String>create());
    }

    UnorderedPartitionedKVWriter kvWriter = new UnorderedPartitionedKVWriterForTest(outputContext, conf,
            numPartitions, availableMemory);

    int sizePerBuffer = kvWriter.sizePerBuffer;

    BitSet partitionsWithData = new BitSet(numPartitions);
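    // a bit is set for every partition that receives data; cardinality() below gives the number of non-empty partitions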
    Text keyText = new Text();
    Text valText = new Text();
    for (int i = 0; i < numRegularRecords; i++) {
        String key = createRandomString(Math.abs(random.nextInt(10)));
        String val = createRandomString(Math.abs(random.nextInt(20)));
        keyText.set(key);
        valText.set(val);
        int partition = partitioner.getPartition(keyText, valText, numPartitions);
        partitionsWithData.set(partition);
        expectedValues.get(partition).put(key, val);
        kvWriter.write(keyText, valText);
        numRecordsWritten++;
    }

    // Write Large key records
    for (int i = 0; i < numLargeKeys; i++) {
        String key = createRandomString(sizePerBuffer + Math.abs(random.nextInt(100)));
        String val = createRandomString(Math.abs(random.nextInt(20)));
        keyText.set(key);
        valText.set(val);
        int partition = partitioner.getPartition(keyText, valText, numPartitions);
        partitionsWithData.set(partition);
        expectedValues.get(partition).put(key, val);
        kvWriter.write(keyText, valText);
        numRecordsWritten++;
    }

    // Write Large val records
    for (int i = 0; i < numLargeValues; i++) {
        String key = createRandomString(Math.abs(random.nextInt(10)));
        String val = createRandomString(sizePerBuffer + Math.abs(random.nextInt(100)));
        keyText.set(key);
        valText.set(val);
        int partition = partitioner.getPartition(keyText, valText, numPartitions);
        partitionsWithData.set(partition);
        expectedValues.get(partition).put(key, val);
        kvWriter.write(keyText, valText);
        numRecordsWritten++;
    }

    // Write records where key + val are large (but both can fit in the buffer individually)
    for (int i = 0; i < numLargeKvPairs; i++) {
        String key = createRandomString(sizePerBuffer / 2 + Math.abs(random.nextInt(100)));
        String val = createRandomString(sizePerBuffer / 2 + Math.abs(random.nextInt(100)));
        keyText.set(key);
        valText.set(val);
        int partition = partitioner.getPartition(keyText, valText, numPartitions);
        partitionsWithData.set(partition);
        expectedValues.get(partition).put(key, val);
        kvWriter.write(keyText, valText);
        numRecordsWritten++;
    }

    List<Event> events = kvWriter.close();
    verify(outputContext, never()).fatalError(any(Throwable.class), any(String.class));

    TezCounter outputLargeRecordsCounter = counters.findCounter(TaskCounter.OUTPUT_LARGE_RECORDS);
    assertEquals(numLargeKeys + numLargeValues + numLargeKvPairs, outputLargeRecordsCounter.getValue());

    // Validate the event
    assertEquals(1, events.size());
    assertTrue(events.get(0) instanceof CompositeDataMovementEvent);
    CompositeDataMovementEvent cdme = (CompositeDataMovementEvent) events.get(0);
    assertEquals(0, cdme.getSourceIndexStart());
    assertEquals(numPartitions, cdme.getCount());
    DataMovementEventPayloadProto eventProto = DataMovementEventPayloadProto
            .parseFrom(ByteString.copyFrom(cdme.getUserPayload()));
    assertFalse(eventProto.hasData());
    BitSet emptyPartitionBits = null;
    if (partitionsWithData.cardinality() != numPartitions) {
        assertTrue(eventProto.hasEmptyPartitions());
        byte[] emptyPartitions = TezCommonUtils
                .decompressByteStringToByteArray(eventProto.getEmptyPartitions());
        emptyPartitionBits = TezUtilsInternal.fromByteArray(emptyPartitions);
        assertEquals(numPartitions - partitionsWithData.cardinality(), emptyPartitionBits.cardinality());
    } else {
        assertFalse(eventProto.hasEmptyPartitions());
        emptyPartitionBits = new BitSet(numPartitions);
    }
    assertEquals(HOST_STRING, eventProto.getHost());
    assertEquals(SHUFFLE_PORT, eventProto.getPort());
    assertEquals(uniqueId, eventProto.getPathComponent());

    // Verify the actual data
    TezTaskOutput taskOutput = new TezTaskOutputFiles(conf, uniqueId);
    Path outputFilePath = null;
    Path spillFilePath = null;
    try {
        outputFilePath = taskOutput.getOutputFile();
    } catch (DiskErrorException e) {
        if (numRecordsWritten > 0) {
            fail();
        } else {
            // Record checking not required.
            return;
        }
    }
    try {
        spillFilePath = taskOutput.getOutputIndexFile();
    } catch (DiskErrorException e) {
        if (numRecordsWritten > 0) {
            fail();
        } else {
            // Record checking not required.
            return;
        }
    }

    // Special case for 0 records.
    TezSpillRecord spillRecord = new TezSpillRecord(spillFilePath, conf);
    DataInputBuffer keyBuffer = new DataInputBuffer();
    DataInputBuffer valBuffer = new DataInputBuffer();
    Text keyDeser = new Text();
    Text valDeser = new Text();
    for (int i = 0; i < numPartitions; i++) {
        if (emptyPartitionBits.get(i)) {
            continue;
        }
        TezIndexRecord indexRecord = spillRecord.getIndex(i);
        FSDataInputStream inStream = FileSystem.getLocal(conf).open(outputFilePath);
        inStream.seek(indexRecord.getStartOffset());
        IFile.Reader reader = new IFile.Reader(inStream, indexRecord.getPartLength(), codec, null, null, false,
                0, -1);
        while (reader.nextRawKey(keyBuffer)) {
            reader.nextRawValue(valBuffer);
            keyDeser.readFields(keyBuffer);
            valDeser.readFields(valBuffer);
            int partition = partitioner.getPartition(keyDeser, valDeser, numPartitions);
            assertTrue(expectedValues.get(partition).remove(keyDeser.toString(), valDeser.toString()));
        }
        inStream.close();
    }
    for (int i = 0; i < numPartitions; i++) {
        assertEquals(0, expectedValues.get(i).size());
        expectedValues.remove(i);
    }
    assertEquals(0, expectedValues.size());
}

From source file:org.alfresco.repo.security.permissions.impl.acegi.ACLEntryAfterInvocationProvider.java

@SuppressWarnings("rawtypes")
private Object[] decide(Authentication authentication, Object object, ConfigAttributeDefinition config,
        Object[] returnedObject) throws AccessDeniedException {
    // Assumption: value is not null
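    // one bit per element of returnedObject; bits cleared by the permission check drop those elements from the result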
    BitSet includedSet = new BitSet(returnedObject.length);

    List<ConfigAttributeDefintion> supportedDefinitions = extractSupportedDefinitions(config);

    if (supportedDefinitions.size() == 0) {
        return returnedObject;
    }

    for (int i = 0, l = returnedObject.length; i < l; i++) {
        Object current = returnedObject[i];
        for (ConfigAttributeDefintion cad : supportedDefinitions) {
            includedSet.set(i, true);
            NodeRef testNodeRef = null;
            if (cad.typeString.equals(AFTER_ACL_NODE)) {
                if (StoreRef.class.isAssignableFrom(current.getClass())) {
                    testNodeRef = nodeService.getRootNode((StoreRef) current);
                } else if (NodeRef.class.isAssignableFrom(current.getClass())) {
                    testNodeRef = (NodeRef) current;
                } else if (ChildAssociationRef.class.isAssignableFrom(current.getClass())) {
                    testNodeRef = ((ChildAssociationRef) current).getChildRef();
                } else if (Pair.class.isAssignableFrom(current.getClass())) {
                    testNodeRef = (NodeRef) ((Pair) current).getSecond();
                } else if (PermissionCheckValue.class.isAssignableFrom(current.getClass())) {
                    testNodeRef = ((PermissionCheckValue) current).getNodeRef();
                } else {
                    throw new ACLEntryVoterException(
                            "The specified parameter is not recognized: " + current.getClass());
                }
            } else if (cad.typeString.equals(AFTER_ACL_PARENT)) {
                if (StoreRef.class.isAssignableFrom(current.getClass())) {
                    testNodeRef = null;
                } else if (NodeRef.class.isAssignableFrom(current.getClass())) {
                    testNodeRef = nodeService.getPrimaryParent((NodeRef) current).getParentRef();
                } else if (ChildAssociationRef.class.isAssignableFrom(current.getClass())) {
                    testNodeRef = ((ChildAssociationRef) current).getParentRef();
                } else if (Pair.class.isAssignableFrom(current.getClass())) {
                    testNodeRef = (NodeRef) ((Pair) current).getSecond();
                } else if (PermissionCheckValue.class.isAssignableFrom(current.getClass())) {
                    NodeRef nodeRef = ((PermissionCheckValue) current).getNodeRef();
                    testNodeRef = nodeService.getPrimaryParent(nodeRef).getParentRef();
                } else {
                    throw new ACLEntryVoterException(
                            "The specified parameter is not recognized: " + current.getClass());
                }
            }

            if (log.isDebugEnabled()) {
                log.debug("\t" + cad.typeString + " test on " + testNodeRef + " from "
                        + current.getClass().getName());
            }

            if (isUnfiltered(testNodeRef)) {
                continue;
            }

            if (includedSet.get(i) && (testNodeRef != null) && (permissionService.hasPermission(testNodeRef,
                    cad.required.toString()) == AccessStatus.DENIED)) {
                includedSet.set(i, false);
            }

        }
    }

    if (includedSet.cardinality() == returnedObject.length) {
        return returnedObject;
    } else {
        Object[] answer = new Object[includedSet.cardinality()];
        for (int i = includedSet.nextSetBit(0), p = 0; i >= 0; i = includedSet.nextSetBit(++i), p++) {
            answer[p] = returnedObject[i];
        }
        return answer;
    }
}