List of usage examples for java.util.BitSet.set
public void set(int bitIndex)
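set(int bitIndex) sets the bit at the given index to true, growing the bit set as needed. The examples below also use the related overloads set(int, boolean) and set(int, int). A minimal standalone sketch of all three before the real-world usages:

import java.util.BitSet;

public class BitSetSetDemo {
    public static void main(String[] args) {
        BitSet bits = new BitSet();              // all bits start clear
        bits.set(3);                             // turn on bit 3
        bits.set(70);                            // indices beyond the current size grow the set
        bits.set(5, true);                       // two-arg overload: set (or clear) conditionally
        bits.set(10, 13);                        // range overload: bits 10, 11, 12; 13 is exclusive
        System.out.println(bits.get(3));         // true
        System.out.println(bits.get(4));         // false
        System.out.println(bits.cardinality());  // 6 bits set: 3, 5, 10, 11, 12, 70
    }
}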
From source file:org.apache.hadoop.mapred.split.TestGroupedSplits.java
@Test(timeout = 10000)
public void testFormat() throws Exception {
    JobConf job = new JobConf(defaultConf);
    Random random = new Random();
    long seed = random.nextLong();
    LOG.info("seed = " + seed);
    random.setSeed(seed);
    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);
    final int length = 10000;
    final int numFiles = 10;
    createFiles(length, numFiles, random);

    // create a combined split for the files
    TextInputFormat wrappedFormat = new TextInputFormat();
    wrappedFormat.configure(job);
    TezGroupedSplitsInputFormat<LongWritable, Text> format =
        new TezGroupedSplitsInputFormat<LongWritable, Text>();
    format.setConf(job);
    format.setDesiredNumberOfSplits(1);
    format.setInputFormat(wrappedFormat);
    LongWritable key = new LongWritable();
    Text value = new Text();
    for (int i = 0; i < 3; i++) {
        int numSplits = random.nextInt(length / 20) + 1;
        LOG.info("splitting: requesting = " + numSplits);
        InputSplit[] splits = format.getSplits(job, numSplits);
        LOG.info("splitting: got = " + splits.length);

        // we should have a single split as the length is comfortably smaller than
        // the block size
        assertEquals("We got more than one splits!", 1, splits.length);
        InputSplit split = splits[0];
        assertEquals("It should be TezGroupedSplit", TezGroupedSplit.class, split.getClass());

        // check the split
        BitSet bits = new BitSet(length);
        LOG.debug("split= " + split);
        RecordReader<LongWritable, Text> reader = format.getRecordReader(split, job, voidReporter);
        try {
            int count = 0;
            while (reader.next(key, value)) {
                int v = Integer.parseInt(value.toString());
                LOG.debug("read " + v);
                if (bits.get(v)) {
                    LOG.warn("conflict with " + v + " at position " + reader.getPos());
                }
                assertFalse("Key in multiple partitions.", bits.get(v));
                bits.set(v);
                count++;
            }
            LOG.info("splits=" + split + " count=" + count);
        } finally {
            reader.close();
        }
        assertEquals("Some keys in no partition.", length, bits.cardinality());
    }
}
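The same idiom recurs in the Hadoop tests in this listing: mark each value with bits.set(v), catch duplicates with bits.get(v), and confirm full coverage with bits.cardinality(). A minimal standalone distillation of that pattern (the method and names are illustrative, not from the Hadoop source):

import java.util.BitSet;

public class CoverageCheck {
    // Verifies that the ints produced by several "partitions" cover
    // 0..length-1 exactly once: no duplicates across partitions, no gaps.
    static void checkCoverage(int length, int[][] partitions) {
        BitSet seen = new BitSet(length);
        for (int[] partition : partitions) {
            for (int v : partition) {
                if (seen.get(v)) {
                    throw new AssertionError("value " + v + " appears in multiple partitions");
                }
                seen.set(v); // record that v has been consumed
            }
        }
        if (seen.cardinality() != length) {
            throw new AssertionError("some values in 0.." + (length - 1) + " were never produced");
        }
    }
}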
From source file:org.apache.hadoop.mapreduce.lib.input.TestCombineTextInputFormat.java
@Test(timeout = 10000)
public void testFormat() throws Exception {
    Job job = Job.getInstance(new Configuration(defaultConf));
    Random random = new Random();
    long seed = random.nextLong();
    LOG.info("seed = " + seed);
    random.setSeed(seed);
    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);
    final int length = 10000;
    final int numFiles = 10;

    // create files with various lengths
    createFiles(length, numFiles, random);

    // create a combined split for the files
    CombineTextInputFormat format = new CombineTextInputFormat();
    for (int i = 0; i < 3; i++) {
        int numSplits = random.nextInt(length / 20) + 1;
        LOG.info("splitting: requesting = " + numSplits);
        List<InputSplit> splits = format.getSplits(job);
        LOG.info("splitting: got = " + splits.size());

        // we should have a single split as the length is comfortably smaller than
        // the block size
        assertEquals("We got more than one splits!", 1, splits.size());
        InputSplit split = splits.get(0);
        assertEquals("It should be CombineFileSplit", CombineFileSplit.class, split.getClass());

        // check the split
        BitSet bits = new BitSet(length);
        LOG.debug("split= " + split);
        TaskAttemptContext context =
            MapReduceTestUtil.createDummyMapTaskAttemptContext(job.getConfiguration());
        RecordReader<LongWritable, Text> reader = format.createRecordReader(split, context);
        assertEquals("reader class is CombineFileRecordReader.",
            CombineFileRecordReader.class, reader.getClass());
        MapContext<LongWritable, Text, LongWritable, Text> mcontext =
            new MapContextImpl<LongWritable, Text, LongWritable, Text>(
                job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
                MapReduceTestUtil.createDummyReporter(), split);
        reader.initialize(split, mcontext);
        try {
            int count = 0;
            while (reader.nextKeyValue()) {
                LongWritable key = reader.getCurrentKey();
                assertNotNull("Key should not be null.", key);
                Text value = reader.getCurrentValue();
                final int v = Integer.parseInt(value.toString());
                LOG.debug("read " + v);
                assertFalse("Key in multiple partitions.", bits.get(v));
                bits.set(v);
                count++;
            }
            LOG.debug("split=" + split + " count=" + count);
        } finally {
            reader.close();
        }
        assertEquals("Some keys in no partition.", length, bits.cardinality());
    }
}
From source file:org.omnaest.utils.table.TableTest.java
@SuppressWarnings("cast")
@Test
public void testRow() {
    Table<String> table =
        this.newTable(new String[][] { { "a", "b", "c" }, { "d", "e", "f" } }, String.class);
    String[] values = new String[] { "a", "b", "c" };
    table.addRowElements(values);
    {
        Row<String> row = table.row(0);
        assertEquals(Arrays.asList(values), ListUtils.valueOf((Iterable<String>) row));
    }
    {
        Row<String> row = table.row(1);
        assertEquals(Arrays.asList("d", "e", "f"), ListUtils.valueOf((Iterable<String>) row));
    }
    {
        Row<String> row = table.row(2);
        assertEquals(Arrays.asList("a", "b", "c"), ListUtils.valueOf((Iterable<String>) row));
    }
    {
        Row<String> row = table.row(0);
        row.setElement(1, "b2");
        assertEquals("b2", row.getElement(1));
    }
    {
        assertNull(table.row(-1));
    }
    {
        BitSet indexFilter = new BitSet();
        indexFilter.set(1);
        indexFilter.set(2);
        Iterable<Row<String>> rows = table.rows(indexFilter);
        assertEquals(2, IterableUtils.size(rows));
        assertEquals(table.row(1).id(), IterableUtils.elementAt(rows, 0).id());
        assertEquals(table.row(2).id(), IterableUtils.elementAt(rows, 1).id());
    }
}
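Here table.rows(indexFilter) selects rows by the indices set in a BitSet. A sketch of the same filtering idea over a plain List, using the nextSetBit(int) iteration idiom (the select helper is illustrative, not part of the omnaest table API):

import java.util.ArrayList;
import java.util.BitSet;
import java.util.List;

public class IndexFilter {
    // Returns the elements of items whose indices are set in indexFilter.
    static <T> List<T> select(List<T> items, BitSet indexFilter) {
        List<T> selected = new ArrayList<>();
        // nextSetBit returns -1 when no higher bit is set, ending the loop
        for (int i = indexFilter.nextSetBit(0);
             i >= 0 && i < items.size();
             i = indexFilter.nextSetBit(i + 1)) {
            selected.add(items.get(i));
        }
        return selected;
    }
}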
From source file:org.apache.hadoop.mapred.TestKeyValueTextInputFormat.java
public void testFormat() throws Exception {
    JobConf job = new JobConf();
    Path file = new Path(workDir, "test.txt");

    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;

    int seed = new Random().nextInt();
    LOG.info("seed = " + seed);
    Random random = new Random(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    // for a variety of lengths
    for (int length = 0; length < MAX_LENGTH; length += random.nextInt(MAX_LENGTH / 10) + 1) {
        LOG.debug("creating; entries = " + length);

        // create a file with length entries
        Writer writer = new OutputStreamWriter(localFs.create(file));
        try {
            for (int i = 0; i < length; i++) {
                writer.write(Integer.toString(i * 2));
                writer.write("\t");
                writer.write(Integer.toString(i));
                writer.write("\n");
            }
        } finally {
            writer.close();
        }

        // try splitting the file in a variety of sizes
        KeyValueTextInputFormat format = new KeyValueTextInputFormat();
        format.configure(job);
        for (int i = 0; i < 3; i++) {
            int numSplits = random.nextInt(MAX_LENGTH / 20) + 1;
            LOG.debug("splitting: requesting = " + numSplits);
            InputSplit[] splits = format.getSplits(job, numSplits);
            LOG.debug("splitting: got = " + splits.length);

            // check each split
            BitSet bits = new BitSet(length);
            for (int j = 0; j < splits.length; j++) {
                LOG.debug("split[" + j + "]= " + splits[j]);
                RecordReader<Text, Text> reader = format.getRecordReader(splits[j], job, reporter);
                Class readerClass = reader.getClass();
                assertEquals("reader class is KeyValueLineRecordReader.",
                    KeyValueLineRecordReader.class, readerClass);

                Text key = reader.createKey();
                Class keyClass = key.getClass();
                Text value = reader.createValue();
                Class valueClass = value.getClass();
                assertEquals("Key class is Text.", Text.class, keyClass);
                assertEquals("Value class is Text.", Text.class, valueClass);
                try {
                    int count = 0;
                    while (reader.next(key, value)) {
                        int v = Integer.parseInt(value.toString());
                        LOG.debug("read " + v);
                        if (bits.get(v)) {
                            LOG.warn("conflict with " + v + " in split " + j
                                + " at position " + reader.getPos());
                        }
                        assertFalse("Key in multiple partitions.", bits.get(v));
                        bits.set(v);
                        count++;
                    }
                    LOG.debug("splits[" + j + "]=" + splits[j] + " count=" + count);
                } finally {
                    reader.close();
                }
            }
            assertEquals("Some keys in no partition.", length, bits.cardinality());
        }
    }
}
From source file:org.springframework.kafka.listener.KafkaMessageListenerContainerTests.java
@Test
public void testSlowConsumerWithException() throws Exception {
    logger.info("Start " + this.testName.getMethodName());
    Map<String, Object> props = KafkaTestUtils.consumerProps("slow3", "false", embeddedKafka);
    DefaultKafkaConsumerFactory<Integer, String> cf =
        new DefaultKafkaConsumerFactory<Integer, String>(props);
    ContainerProperties containerProps = new ContainerProperties(topic3);
    final CountDownLatch latch = new CountDownLatch(18);
    final BitSet bitSet = new BitSet(6);
    final Map<String, AtomicInteger> faults = new HashMap<>();
    RetryingMessageListenerAdapter<Integer, String> adapter = new RetryingMessageListenerAdapter<>(
            new MessageListener<Integer, String>() {

                @Override
                public void onMessage(ConsumerRecord<Integer, String> message) {
                    logger.info("slow3: " + message);
                    bitSet.set((int) (message.partition() * 3 + message.offset()));
                    String key = message.topic() + message.partition() + message.offset();
                    if (faults.get(key) == null) {
                        faults.put(key, new AtomicInteger(1));
                    } else {
                        faults.get(key).incrementAndGet();
                    }
                    latch.countDown(); // 3 per = 18
                    if (faults.get(key).get() < 3) { // succeed on the third attempt
                        throw new FooEx();
                    }
                }

            }, buildRetry(), null);
    containerProps.setMessageListener(adapter);
    containerProps.setPauseAfter(100);
    KafkaMessageListenerContainer<Integer, String> container =
        new KafkaMessageListenerContainer<>(cf, containerProps);
    container.setBeanName("testSlow3");
    container.start();
    Consumer<?, ?> consumer = spyOnConsumer(container);
    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic3);
    template.sendDefault(0, "foo");
    template.sendDefault(2, "bar");
    template.sendDefault(0, "baz");
    template.sendDefault(2, "qux");
    template.flush();
    Thread.sleep(300);
    template.sendDefault(0, "fiz");
    template.sendDefault(2, "buz");
    template.flush();
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(bitSet.cardinality()).isEqualTo(6);
    verify(consumer, atLeastOnce()).pause(anyObject());
    verify(consumer, atLeastOnce()).resume(anyObject());
    container.stop();
    logger.info("Stop " + this.testName.getMethodName());
}
From source file:org.springframework.kafka.listener.KafkaMessageListenerContainerTests.java
@Test
public void testSlowConsumerWithSlowThenExceptionThenGood() throws Exception {
    logger.info("Start " + this.testName.getMethodName());
    Map<String, Object> props = KafkaTestUtils.consumerProps("slow4", "false", embeddedKafka);
    DefaultKafkaConsumerFactory<Integer, String> cf =
        new DefaultKafkaConsumerFactory<Integer, String>(props);
    ContainerProperties containerProps = new ContainerProperties(topic4);
    final CountDownLatch latch = new CountDownLatch(18);
    final BitSet bitSet = new BitSet(6);
    final Map<String, AtomicInteger> faults = new HashMap<>();
    RetryingMessageListenerAdapter<Integer, String> adapter = new RetryingMessageListenerAdapter<>(
            new MessageListener<Integer, String>() {

                @Override
                public void onMessage(ConsumerRecord<Integer, String> message) {
                    logger.info("slow4: " + message);
                    bitSet.set((int) (message.partition() * 4 + message.offset()));
                    String key = message.topic() + message.partition() + message.offset();
                    if (faults.get(key) == null) {
                        faults.put(key, new AtomicInteger(1));
                    } else {
                        faults.get(key).incrementAndGet();
                    }
                    latch.countDown(); // 3 per = 18
                    if (faults.get(key).get() == 1) {
                        try {
                            Thread.sleep(1000);
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt();
                        }
                    }
                    if (faults.get(key).get() < 3) { // succeed on the third attempt
                        throw new FooEx();
                    }
                }

            }, buildRetry(), null);
    containerProps.setMessageListener(adapter);
    containerProps.setPauseAfter(100);
    KafkaMessageListenerContainer<Integer, String> container =
        new KafkaMessageListenerContainer<>(cf, containerProps);
    container.setBeanName("testSlow4");
    container.start();
    Consumer<?, ?> consumer = spyOnConsumer(container);
    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic4);
    template.sendDefault(0, "foo");
    template.sendDefault(2, "bar");
    template.sendDefault(0, "baz");
    template.sendDefault(2, "qux");
    template.flush();
    Thread.sleep(300);
    template.sendDefault(0, "fiz");
    template.sendDefault(2, "buz");
    template.flush();
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(bitSet.cardinality()).isEqualTo(6);
    verify(consumer, atLeastOnce()).pause(anyObject());
    verify(consumer, atLeastOnce()).resume(anyObject());
    container.stop();
    logger.info("Stop " + this.testName.getMethodName());
}
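Both Kafka tests flatten a (partition, offset) pair into a single bit index — bitSet.set(partition * N + offset) — so one BitSet can confirm that all six expected deliveries happened regardless of how many retries each record needed. A standalone sketch of that mapping (the class and constant are illustrative; the multiplier must be at least the number of offsets per partition so indices stay distinct):

import java.util.BitSet;

public class DeliveryTracker {
    static final int OFFSETS_PER_PARTITION = 3; // illustrative bound, as in the tests above

    private final BitSet delivered = new BitSet();

    // Marks a (partition, offset) pair as seen; redeliveries are idempotent
    // because setting an already-set bit changes nothing.
    void onRecord(int partition, long offset) {
        delivered.set(partition * OFFSETS_PER_PARTITION + (int) offset);
    }

    // True once every expected (partition, offset) slot has been seen.
    boolean allDelivered(int expectedCount) {
        return delivered.cardinality() == expectedCount;
    }
}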
From source file:com.jefftharris.passwdsafe.PasswdSafe.java
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
    final BitSet options = new BitSet();
    options.set(MENU_BIT_HAS_CLOSE);

    itsFileDataFrag.useFileData(new PasswdFileDataUser() {
        @Override
        public void useFileData(@NonNull PasswdFileData fileData) {
            boolean fileEditable = fileData.canEdit();
            switch (itsCurrViewMode) {
            case VIEW_LIST: {
                options.set(MENU_BIT_CAN_ADD, fileEditable);
                options.set(MENU_BIT_HAS_SEARCH, true);
                if (fileEditable) {
                    options.set(MENU_BIT_HAS_FILE_OPS, true);
                    options.set(MENU_BIT_HAS_FILE_CHANGE_PASSWORD, fileData.isNotYubikey());
                    options.set(MENU_BIT_HAS_FILE_PROTECT, true);
                    options.set(MENU_BIT_PROTECT_ALL, itsLocation.getGroups().isEmpty());
                }
                if (fileData.canDelete()) {
                    options.set(MENU_BIT_HAS_FILE_OPS, true);
                    options.set(MENU_BIT_HAS_FILE_DELETE, true);
                }
                break;
            }
            case VIEW_RECORD: {
                options.set(MENU_BIT_CAN_ADD, fileEditable);
                break;
            }
            case INIT:
            case FILE_OPEN:
            case FILE_NEW:
            case VIEW_ABOUT:
            case VIEW_EXPIRATION:
            case VIEW_POLICY_LIST:
            case VIEW_PREFERENCES: {
                break;
            }
            case EDIT_RECORD:
            case CHANGING_PASSWORD: {
                options.set(MENU_BIT_HAS_CLOSE, false);
                break;
            }
            }
        }
    });

    MenuItem item = menu.findItem(R.id.menu_add);
    if (item != null) {
        item.setVisible(options.get(MENU_BIT_CAN_ADD));
    }

    item = menu.findItem(R.id.menu_close);
    if (item != null) {
        item.setVisible(options.get(MENU_BIT_HAS_CLOSE));
    }

    item = menu.findItem(R.id.menu_file_ops);
    if (item != null) {
        item.setVisible(options.get(MENU_BIT_HAS_FILE_OPS));
    }

    item = menu.findItem(R.id.menu_file_change_password);
    if (item != null) {
        item.setEnabled(options.get(MENU_BIT_HAS_FILE_CHANGE_PASSWORD));
    }

    if (options.get(MENU_BIT_HAS_FILE_OPS)) {
        boolean hasProtect = options.get(MENU_BIT_HAS_FILE_PROTECT);
        boolean viewProtectAll = options.get(MENU_BIT_PROTECT_ALL);

        item = menu.findItem(R.id.menu_file_protect_records);
        if (item != null) {
            item.setEnabled(hasProtect);
            item.setTitle(viewProtectAll ? R.string.protect_all : R.string.protect_group);
        }
        item = menu.findItem(R.id.menu_file_unprotect_records);
        if (item != null) {
            item.setEnabled(hasProtect);
            item.setTitle(viewProtectAll ? R.string.unprotect_all : R.string.unprotect_group);
        }

        item = menu.findItem(R.id.menu_file_delete);
        if (item != null) {
            item.setEnabled(options.get(MENU_BIT_HAS_FILE_DELETE));
        }
    }

    item = menu.findItem(R.id.menu_search);
    if (item != null) {
        item.setVisible(options.get(MENU_BIT_HAS_SEARCH));
    }

    return super.onPrepareOptionsMenu(menu);
}
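This example uses a BitSet indexed by MENU_BIT_* constants as a compact, mutable set of UI flags, mixing the one-arg set(int) with the conditional two-arg set(int, boolean) overload. A reduced sketch of the idiom (the flag constants and method are illustrative, not the app's real values):

import java.util.BitSet;

public class MenuFlags {
    static final int FLAG_HAS_CLOSE = 0;
    static final int FLAG_CAN_ADD = 1;
    static final int FLAG_HAS_SEARCH = 2;

    static BitSet computeOptions(boolean editable) {
        BitSet options = new BitSet();
        options.set(FLAG_HAS_CLOSE);          // set(int): unconditionally on
        options.set(FLAG_CAN_ADD, editable);  // set(int, boolean): on only if editable
        options.set(FLAG_HAS_SEARCH, true);
        return options;
    }
}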
From source file:org.apache.hadoop.hbase.security.visibility.VisibilityController.java
private Filter createVisibilityLabelFilter(HRegion region, Authorizations authorizations)
        throws IOException {
    Map<ByteRange, Integer> cfVsMaxVersions = new HashMap<ByteRange, Integer>();
    for (HColumnDescriptor hcd : region.getTableDesc().getFamilies()) {
        cfVsMaxVersions.put(new SimpleByteRange(hcd.getName()), hcd.getMaxVersions());
    }
    if (authorizations == null) {
        // No Authorizations present for this scan/Get!
        // For system tables other than "labels", just scan without visibility checks and
        // filtering. Checking visibility labels for the META and NAMESPACE tables is not needed.
        TableName table = region.getRegionInfo().getTable();
        if (table.isSystemTable() && !table.equals(LABELS_TABLE_NAME)) {
            return null;
        }
    } else {
        for (String label : authorizations.getLabels()) {
            if (!VisibilityLabelsValidator.isValidLabel(label)) {
                throw new IllegalArgumentException("Invalid authorization label : " + label
                        + ". Authorizations cannot contain '(', ')' ,'&' ,'|', '!'"
                        + " and cannot be empty");
            }
        }
    }
    Filter visibilityLabelFilter = null;
    if (this.scanLabelGenerators != null) {
        List<String> labels = null;
        for (ScanLabelGenerator scanLabelGenerator : this.scanLabelGenerators) {
            try {
                // null authorizations to be handled inside SLG impl.
                labels = scanLabelGenerator.getLabels(getActiveUser(), authorizations);
                labels = (labels == null) ? new ArrayList<String>() : labels;
                authorizations = new Authorizations(labels);
            } catch (Throwable t) {
                LOG.error(t);
                throw new IOException(t);
            }
        }
        int labelsCount = this.visibilityManager.getLabelsCount();
        BitSet bs = new BitSet(labelsCount + 1); // ordinal is index 1 based
        if (labels != null) {
            for (String label : labels) {
                int labelOrdinal = this.visibilityManager.getLabelOrdinal(label);
                if (labelOrdinal != 0) {
                    bs.set(labelOrdinal);
                }
            }
        }
        visibilityLabelFilter = new VisibilityLabelFilter(bs, cfVsMaxVersions);
    }
    return visibilityLabelFilter;
}
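The filter construction above allocates one bit per visibility-label ordinal, with ordinal 0 reserved to mean "label not found", hence the BitSet(labelsCount + 1) sizing and the labelOrdinal != 0 guard. A sketch of just that step (the helper is illustrative, not HBase API):

import java.util.BitSet;
import java.util.List;

public class LabelOrdinals {
    // Builds a bit-per-ordinal authorization set; ordinal 0 ("unknown
    // label") is deliberately never set, matching the filter code above.
    static BitSet toOrdinalSet(List<Integer> labelOrdinals, int labelsCount) {
        BitSet bs = new BitSet(labelsCount + 1); // ordinals are 1-based
        for (int ordinal : labelOrdinals) {
            if (ordinal != 0) {
                bs.set(ordinal);
            }
        }
        return bs;
    }
}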
From source file:org.zkoss.poi.ss.format.CellNumberFormatter.java
/** {@inheritDoc} */
public void formatValue(StringBuffer toAppendTo, Object valueObject) {
    double value = ((Number) valueObject).doubleValue();
    value *= scale;

    // For negative numbers:
    // - If the cell format has a negative number format, this method
    //   is called with a positive value and the number format has
    //   the negative formatting required, e.g. minus sign or brackets.
    // - If the cell format does not have a negative number format,
    //   this method is called with a negative value and the number is
    //   formatted with a minus sign at the start.
    boolean negative = value < 0;
    if (negative)
        value = -value;

    // Split out the fractional part if we need to print a fraction
    double fractional = 0;
    if (slash != null) {
        if (improperFraction) {
            fractional = value;
            value = 0;
        } else {
            fractional = value % 1.0;
            //noinspection SillyAssignment
            value = (long) value;
        }
    }

    Set<StringMod> mods = new TreeSet<StringMod>();
    StringBuffer output = new StringBuffer(desc);

    if (exponent != null) {
        writeScientific(value, output, mods);
    } else if (improperFraction) {
        writeFraction(value, null, fractional, output, mods);
    } else {
        StringBuffer result = new StringBuffer();
        Formatter f = new Formatter(result, locale); // ZSS-68
        f.format(locale, printfFmt, value); // ZSS-68

        if (numerator == null) {
            writeFractional(result, output);
            writeInteger(result, output, integerSpecials, mods, integerCommas, false);
        } else {
            writeFraction(value, result, fractional, output, mods);
        }
    }

    // Now strip out any remaining '#'s and add any pending text ...
    ListIterator<Special> it = specials.listIterator();
    Iterator<StringMod> changes = mods.iterator();
    StringMod nextChange = (changes.hasNext() ? changes.next() : null);
    int adjust = 0;
    BitSet deletedChars = new BitSet(); // records chars already deleted
    final String groupSeparator = "" + Formatters.getGroupingSeparator(locale); // ZSS-68
    while (it.hasNext()) {
        Special s = it.next();
        int adjustedPos = s.pos + adjust;
        if (!deletedChars.get(s.pos) && output.charAt(adjustedPos) == '#') {
            output.deleteCharAt(adjustedPos);
            adjust--;
            deletedChars.set(s.pos);
        }
        while (nextChange != null && s == nextChange.special) {
            int lenBefore = output.length();
            int modPos = s.pos + adjust;
            int posTweak = 0;
            switch (nextChange.op) {
            case StringMod.AFTER:
                // ignore adding a comma after a deleted char (which was a '#')
                // 20110321, henrichen@zkoss.org: respect current locale
                if (nextChange.toAdd.equals(groupSeparator) && deletedChars.get(s.pos))
                    break;
                posTweak = 1;
                //noinspection fallthrough
            case StringMod.BEFORE:
                output.insert(modPos + posTweak, nextChange.toAdd);
                break;

            case StringMod.REPLACE:
                int delPos = s.pos; // delete starting pos in original coordinates
                if (!nextChange.startInclusive) {
                    delPos++;
                    modPos++;
                }
                // Skip over anything already deleted
                while (deletedChars.get(delPos)) {
                    delPos++;
                    modPos++;
                }
                int delEndPos = nextChange.end.pos; // delete end point in original
                if (nextChange.endInclusive)
                    delEndPos++;
                int modEndPos = delEndPos + adjust; // delete end point in current
                if (modPos < modEndPos) {
                    if (nextChange.toAdd == "")
                        output.delete(modPos, modEndPos);
                    else {
                        char fillCh = nextChange.toAdd.charAt(0);
                        for (int i = modPos; i < modEndPos; i++)
                            output.setCharAt(i, fillCh);
                    }
                    deletedChars.set(delPos, delEndPos);
                }
                break;

            default:
                throw new IllegalStateException("Unknown op: " + nextChange.op);
            }
            adjust += output.length() - lenBefore;

            if (changes.hasNext())
                nextChange = changes.next();
            else
                nextChange = null;
        }
    }

    // Finally, add it to the string
    if (negative)
        toAppendTo.append('-');
    toAppendTo.append(output);
}
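The deletedChars BitSet above records which template positions have already been removed, in original coordinates, so later edits can skip over them. It uses both overloads: set(int) for a single deleted '#' and set(from, to) for a half-open range. A minimal sketch of those two calls:

import java.util.BitSet;

public class DeletedCharsDemo {
    public static void main(String[] args) {
        BitSet deletedChars = new BitSet();
        deletedChars.set(4);                      // one position deleted
        deletedChars.set(10, 13);                 // positions 10, 11, 12 deleted
        System.out.println(deletedChars.get(11)); // true: already deleted, skip it
        System.out.println(deletedChars.get(13)); // false: range end is exclusive
    }
}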
From source file:com.microsoft.azure.management.datalake.store.uploader.UploadMetadata.java
/**
 * Verifies the given metadata for consistency. Checks include:
 *   Completeness
 *   Existence and consistency with local file
 *   Segment data consistency
 *
 * @throws InvalidMetadataException Thrown if the metadata is invalid.
 */
public void validateConsistency() throws InvalidMetadataException {
    if (this.segments == null || this.segments.length != this.segmentCount) {
        throw new InvalidMetadataException("Inconsistent number of segments");
    }

    long sum = 0;
    int lastSegmentNumber = -1;
    BitSet segments = new BitSet(this.segmentCount);

    for (UploadSegmentMetadata segment : this.segments) {
        if (segment.getSegmentNumber() < 0 || segment.getSegmentNumber() >= this.segmentCount) {
            throw new InvalidMetadataException(MessageFormat.format(
                    "Segment numbers must be at least 0 and less than {0}. Found segment number {1}.",
                    this.segmentCount, segment.getSegmentNumber()));
        }

        if (segment.getSegmentNumber() <= lastSegmentNumber) {
            throw new InvalidMetadataException(MessageFormat.format(
                    "Segment number {0} appears out of order.", segment.getSegmentNumber()));
        }

        if (segments.get(segment.getSegmentNumber())) {
            throw new InvalidMetadataException(MessageFormat.format(
                    "Segment number {0} appears twice", segment.getSegmentNumber()));
        }

        if (segment.getOffset() != sum) {
            throw new InvalidMetadataException(MessageFormat.format(
                    "Segment number {0} has an invalid starting offset ({1}). Expected {2}.",
                    segment.getSegmentNumber(), segment.getOffset(), sum));
        }

        segments.set(segment.getSegmentNumber());
        sum += segment.getLength();
        lastSegmentNumber = segment.getSegmentNumber();
    }

    if (sum != this.fileLength) {
        throw new InvalidMetadataException(
                "The individual segment lengths do not add up to the input File length");
    }
}
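The duplicate check in validateConsistency distills to a small idiom: a BitSet sized to the expected count flags a repeated number in constant time per element, with far less allocation than a HashSet<Integer>. A standalone sketch (the helper name is illustrative):

import java.util.BitSet;

public class SegmentChecks {
    // Throws if any segment number in 0..segmentCount-1 appears twice.
    static void requireUniqueSegments(int[] segmentNumbers, int segmentCount) {
        BitSet seen = new BitSet(segmentCount);
        for (int n : segmentNumbers) {
            if (seen.get(n)) {
                throw new IllegalStateException("segment number " + n + " appears twice");
            }
            seen.set(n); // mark n as encountered
        }
    }
}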