List of usage examples for java.util.BitSet : BitSet(int)

public BitSet(int nbits)
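Every example on this page constructs a BitSet through the public int-argument form, new BitSet(nbits), which pre-sizes the backing words; all bits start out false, and the set still grows on demand past the initial hint. A minimal standalone sketch of that behavior (ours, not taken from any project below):

import java.util.BitSet;

public class BitSetConstructorDemo {
    public static void main(String[] args) {
        BitSet flags = new BitSet(64);            // size hint: room for 64 bits, all false
        flags.set(3);
        flags.set(10, 20);                        // set bits 10..19
        System.out.println(flags.cardinality());  // 11
        flags.set(1000);                          // grows transparently past the hint
        System.out.println(flags.length());       // 1001 (highest set bit + 1)
    }
}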
From source file:org.efaps.admin.datamodel.attributetype.BitEnumType.java
/**
 * @param _bitIndex bit index the integer value is wanted for
 * @return integer value representing the bit index
 */
public static int getInt4Index(final int _bitIndex) {
    final BitSet bitSet = new BitSet(_bitIndex + 1);
    bitSet.set(_bitIndex);
    int ret = 0;
    for (int i = 0; i < bitSet.length(); ++i) {
        ret += bitSet.get(i) ? 1 << i : 0;
    }
    return ret;
}
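For bit indices 0..30 the method above reduces to a plain shift, 1 << _bitIndex; the BitSet is only used to materialize the single set bit. A quick self-contained check (the wrapper class and main are ours; only getInt4Index is copied from above):

import java.util.BitSet;

public class GetInt4IndexCheck {
    static int getInt4Index(final int bitIndex) {
        final BitSet bitSet = new BitSet(bitIndex + 1);
        bitSet.set(bitIndex);
        int ret = 0;
        for (int i = 0; i < bitSet.length(); ++i) {
            ret += bitSet.get(i) ? 1 << i : 0;
        }
        return ret;
    }

    public static void main(String[] args) {
        for (int i = 0; i <= 30; i++) {
            if (getInt4Index(i) != (1 << i)) {
                throw new AssertionError("mismatch at bit " + i);
            }
        }
        System.out.println("getInt4Index(4) = " + getInt4Index(4)); // 16
    }
}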
From source file:org.apache.hadoop.mapred.TestMultiFileInputFormat.java
public void testFormat() throws IOException {
    if (LOG.isInfoEnabled()) {
        LOG.info("Test started");
        LOG.info("Max split count = " + MAX_SPLIT_COUNT);
        LOG.info("Split count increment = " + SPLIT_COUNT_INCR);
        LOG.info("Max bytes per file = " + MAX_BYTES);
        LOG.info("Max number of files = " + MAX_NUM_FILES);
        LOG.info("Number of files increment = " + NUM_FILES_INCR);
    }
    MultiFileInputFormat<Text, Text> format = new DummyMultiFileInputFormat();
    FileSystem fs = FileSystem.getLocal(job);
    for (int numFiles = 1; numFiles < MAX_NUM_FILES;
            numFiles += (NUM_FILES_INCR / 2) + rand.nextInt(NUM_FILES_INCR / 2)) {
        Path dir = initFiles(fs, numFiles, -1);
        BitSet bits = new BitSet(numFiles);
        for (int i = 1; i < MAX_SPLIT_COUNT; i += rand.nextInt(SPLIT_COUNT_INCR) + 1) {
            LOG.info("Running for Num Files=" + numFiles + ", split count=" + i);
            MultiFileSplit[] splits = (MultiFileSplit[]) format.getSplits(job, i);
            bits.clear();
            for (MultiFileSplit split : splits) {
                long splitLength = 0;
                for (Path p : split.getPaths()) {
                    long length = fs.getContentSummary(p).getLength();
                    assertEquals(length, lengths.get(p.getName()).longValue());
                    splitLength += length;
                    String name = p.getName();
                    int index = Integer.parseInt(name.substring(name.lastIndexOf("file_") + 5));
                    assertFalse(bits.get(index));
                    bits.set(index);
                }
                assertEquals(splitLength, split.getLength());
            }
        }
        assertEquals(bits.cardinality(), numFiles);
        fs.delete(dir, true);
    }
    LOG.info("Test Finished");
}
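This test, and the other Hadoop input-format tests on this page, all lean on the same duplicate/omission check: a BitSet sized to the expected record count, get/set per record to catch duplicates, and cardinality() at the end to catch gaps. Stripped down to just that idiom (names are illustrative, nothing here is Hadoop code):

import java.util.BitSet;

public class SeenSetIdiom {
    public static void main(String[] args) {
        final int expected = 100;
        BitSet seen = new BitSet(expected);
        for (int key = expected - 1; key >= 0; key--) { // records arrive in any order
            if (seen.get(key)) {
                throw new IllegalStateException("Key in multiple partitions: " + key);
            }
            seen.set(key);
        }
        // cardinality() counts set bits, so anything short of expected means a gap
        if (seen.cardinality() != expected) {
            throw new IllegalStateException("Some keys in no partition.");
        }
        System.out.println("all " + expected + " keys seen exactly once");
    }
}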
From source file:org.apache.hadoop.mapreduce.lib.input.TestCombineSequenceFileInputFormat.java
@Test(timeout = 10000)
public void testFormat() throws IOException, InterruptedException {
    Job job = Job.getInstance(conf);
    Random random = new Random();
    long seed = random.nextLong();
    random.setSeed(seed);
    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int length = 10000;
    final int numFiles = 10;

    // create files with a variety of lengths
    createFiles(length, numFiles, random, job);

    TaskAttemptContext context = MapReduceTestUtil.createDummyMapTaskAttemptContext(job.getConfiguration());
    // create a combine split for the files
    InputFormat<IntWritable, BytesWritable> format =
            new CombineSequenceFileInputFormat<IntWritable, BytesWritable>();
    for (int i = 0; i < 3; i++) {
        int numSplits = random.nextInt(length / (SequenceFile.SYNC_INTERVAL / 20)) + 1;
        LOG.info("splitting: requesting = " + numSplits);
        List<InputSplit> splits = format.getSplits(job);
        LOG.info("splitting: got = " + splits.size());

        // we should have a single split, as the length is comfortably smaller
        // than the block size
        assertEquals("We got more than one splits!", 1, splits.size());
        InputSplit split = splits.get(0);
        assertEquals("It should be CombineFileSplit", CombineFileSplit.class, split.getClass());

        // check the split
        BitSet bits = new BitSet(length);
        RecordReader<IntWritable, BytesWritable> reader = format.createRecordReader(split, context);
        MapContext<IntWritable, BytesWritable, IntWritable, BytesWritable> mcontext =
                new MapContextImpl<IntWritable, BytesWritable, IntWritable, BytesWritable>(
                        job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
                        MapReduceTestUtil.createDummyReporter(), split);
        reader.initialize(split, mcontext);
        assertEquals("reader class is CombineFileRecordReader.", CombineFileRecordReader.class,
                reader.getClass());

        try {
            while (reader.nextKeyValue()) {
                IntWritable key = reader.getCurrentKey();
                BytesWritable value = reader.getCurrentValue();
                assertNotNull("Value should not be null.", value);
                final int k = key.get();
                LOG.debug("read " + k);
                assertFalse("Key in multiple partitions.", bits.get(k));
                bits.set(k);
            }
        } finally {
            reader.close();
        }
        assertEquals("Some keys in no partition.", length, bits.cardinality());
    }
}
From source file:de.uniba.wiai.lspi.chord.data.ID.java
private static BitSet bitsetResize(final BitSet a, final int numBits) {
    assert (numBits > 0);
    final BitSet b = new BitSet(numBits);
    for (int i = 0; i < Math.min(a.size(), numBits); ++i)
        b.set(i, a.get(i));
    return b;
}
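Note that size() is the allocated capacity (rounded up to a multiple of 64), not the logical length, so the loop may copy trailing false bits; harmless, just wasted iterations. The standard slice method BitSet.get(int, int) expresses the same truncating copy directly (a sketch, not the project's code):

import java.util.BitSet;

public class BitsetResizeAlt {
    /** Same set bits as the loop above: keep bits 0..numBits-1, drop the rest. */
    static BitSet bitsetResize(final BitSet a, final int numBits) {
        return a.get(0, numBits);
    }

    public static void main(String[] args) {
        BitSet a = new BitSet();
        a.set(1);
        a.set(70);
        System.out.println(bitsetResize(a, 64)); // {1} -- bit 70 is cut off
    }
}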
From source file:com.joliciel.jochre.graphics.SegmenterImplTest.java
public void testSplitShape(@NonStrict final LetterGuesserService letterGuesserService,
        @NonStrict final SourceImage sourceImage, @NonStrict final Shape shape) throws Exception {
    GraphicsServiceImpl graphicsService = new GraphicsServiceImpl();
    graphicsService.setLetterGuesserService(letterGuesserService);

    final int threshold = 100;
    final int width = 12;
    final int height = 9;
    final int maxBridgeWidth = 2;
    final int minLetterWeight = 12;
    final int maxOverlap = 2;
    final BitSet bitset = new BitSet(width * height);
    final int left = 10;
    final int top = 10;

    new NonStrictExpectations() {
        {
            shape.getHeight();
            returns(height);
            shape.getWidth();
            returns(width);
            shape.getLeft();
            returns(left);
            shape.getTop();
            returns(top);
            shape.getRight();
            returns(left + width - 1);
            shape.getBottom();
            returns(top + height - 1);
            sourceImage.getSeparationThreshold();
            returns(threshold);

            int[] pixels = {
                1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, // row 0
                0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, // row 1
                0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, // row 2
                0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, // row 3
                0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, // row 4
                0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, // row 5
                0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, // row 6
                1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, // row 7
                0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, // row 8
            };

            for (int x = -1; x <= width; x++)
                for (int y = -1; y <= height; y++) {
                    shape.isPixelBlack(x, y, threshold);
                    if (x >= 0 && x < width && y >= 0 && y < height) {
                        returns(pixels[y * width + x] == 1);
                        if (pixels[y * width + x] == 1) {
                            bitset.set(y * width + x);
                        }
                    } else
                        returns(false);
                }

            shape.getBlackAndWhiteBitSet(threshold, 0);
            returns(bitset);
        }
    };

    SegmenterImpl segmenter = new SegmenterImpl(sourceImage);
    segmenter.setGraphicsService(graphicsService);

    List<Shape> shapes = segmenter.splitShape(shape, sourceImage, maxBridgeWidth, minLetterWeight, maxOverlap);
    for (Shape splitShape : shapes) {
        LOG.debug("Split shape: " + splitShape);
    }

    assertEquals(2, shapes.size());
    Shape leftShape = shapes.get(0);
    assertEquals(left, leftShape.getLeft());
    assertEquals(left + 5, leftShape.getRight());
    assertEquals(top, leftShape.getTop());
    assertEquals(top + 7, leftShape.getBottom());
    Shape rightShape = shapes.get(1);
    assertEquals(left + 6, rightShape.getLeft());
    assertEquals(top + 11, rightShape.getRight());
    assertEquals(top, rightShape.getTop());
    assertEquals(top + 8, rightShape.getBottom());
}
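The expectations above pack a 2D pixel grid into the BitSet row-major, with bit index = y * width + x. A tiny illustration of just that mapping (dimensions arbitrary):

import java.util.BitSet;

public class GridBits {
    public static void main(String[] args) {
        final int width = 4;
        BitSet grid = new BitSet(4 * 3);             // 4x3 grid, row-major
        int x = 2, y = 1;
        grid.set(y * width + x);                     // mark pixel (2, 1)
        System.out.println(grid.get(1 * width + 2)); // true
        System.out.println(grid.get(0));             // false
    }
}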
From source file:org.apache.hadoop.mapreduce.lib.input.TestCombineTextInputFormat.java
@Test(timeout = 10000)
public void testFormat() throws Exception {
    Job job = Job.getInstance(new Configuration(defaultConf));
    Random random = new Random();
    long seed = random.nextLong();
    LOG.info("seed = " + seed);
    random.setSeed(seed);
    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int length = 10000;
    final int numFiles = 10;

    // create files with various lengths
    createFiles(length, numFiles, random);

    // create a combined split for the files
    CombineTextInputFormat format = new CombineTextInputFormat();
    for (int i = 0; i < 3; i++) {
        int numSplits = random.nextInt(length / 20) + 1;
        LOG.info("splitting: requesting = " + numSplits);
        List<InputSplit> splits = format.getSplits(job);
        LOG.info("splitting: got = " + splits.size());

        // we should have a single split, as the length is comfortably smaller
        // than the block size
        assertEquals("We got more than one splits!", 1, splits.size());
        InputSplit split = splits.get(0);
        assertEquals("It should be CombineFileSplit", CombineFileSplit.class, split.getClass());

        // check the split
        BitSet bits = new BitSet(length);
        LOG.debug("split= " + split);
        TaskAttemptContext context = MapReduceTestUtil.createDummyMapTaskAttemptContext(job.getConfiguration());
        RecordReader<LongWritable, Text> reader = format.createRecordReader(split, context);
        assertEquals("reader class is CombineFileRecordReader.", CombineFileRecordReader.class,
                reader.getClass());
        MapContext<LongWritable, Text, LongWritable, Text> mcontext =
                new MapContextImpl<LongWritable, Text, LongWritable, Text>(job.getConfiguration(),
                        context.getTaskAttemptID(), reader, null, null,
                        MapReduceTestUtil.createDummyReporter(), split);
        reader.initialize(split, mcontext);

        try {
            int count = 0;
            while (reader.nextKeyValue()) {
                LongWritable key = reader.getCurrentKey();
                assertNotNull("Key should not be null.", key);
                Text value = reader.getCurrentValue();
                final int v = Integer.parseInt(value.toString());
                LOG.debug("read " + v);
                assertFalse("Key in multiple partitions.", bits.get(v));
                bits.set(v);
                count++;
            }
            LOG.debug("split=" + split + " count=" + count);
        } finally {
            reader.close();
        }
        assertEquals("Some keys in no partition.", length, bits.cardinality());
    }
}
From source file:org.osgp.adapter.protocol.dlms.domain.commands.ConfigurationObjectHelperService.java
/**
 * Calculate the byte array for the given list of ConfigurationFlagType
 * objects.
 *
 * @param configurationFlags
 *            List of ConfigurationFlag objects
 * @return byte array with MSB in first element
 */
public byte[] toByteArray(final List<ConfigurationFlag> configurationFlags) {
    final BitSet bitSet = new BitSet(NUMBER_OF_FLAG_BITS);
    for (final ConfigurationFlag configurationFlag : configurationFlags) {
        if (configurationFlag.isEnabled()) {
            bitSet.set(this.toBitPosition(configurationFlag.getConfigurationFlagType()), true);
        }
    }
    final byte[] byteArray = bitSet.toByteArray();
    // swap bytes 0 and 1 so the most significant byte comes first
    final byte tmp = byteArray[1];
    byteArray[1] = byteArray[0];
    byteArray[0] = tmp;
    return byteArray;
}
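BitSet.toByteArray() emits the least significant byte first, which is why the two bytes are swapped to produce an MSB-first pair. One caveat: the returned array is only as long as needed for the highest set bit, so if no flag lands in bits 8..15 it has fewer than two elements and byteArray[1] throws. A defensive variant that pads first (our sketch, not the project's code):

import java.util.Arrays;
import java.util.BitSet;

public class FlagBytes {
    /** MSB-first two-byte encoding, padded so the swap is always safe. */
    static byte[] toMsbFirstBytes(final BitSet bitSet) {
        // toByteArray() is little-endian and sized by the highest set bit
        final byte[] bytes = Arrays.copyOf(bitSet.toByteArray(), 2);
        final byte tmp = bytes[1];
        bytes[1] = bytes[0];
        bytes[0] = tmp;
        return bytes;
    }

    public static void main(String[] args) {
        BitSet flags = new BitSet(16);
        flags.set(0); // toByteArray() alone would return a length-1 array here
        byte[] out = toMsbFirstBytes(flags);
        System.out.printf("%02x %02x%n", out[0], out[1]); // 00 01
    }
}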
From source file:com.l2jfree.gameserver.idfactory.BitSetIDFactory.java
protected synchronized void increaseBitSetCapacity() {
    BitSet newBitSet = new BitSet(PrimeFinder.nextPrime(usedIdCount() * 11 / 10));
    newBitSet.or(_freeIds);
    _freeIds = newBitSet;
}
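Growing a BitSet means allocating a larger one and or()-ing the old bits in; here the new capacity is the next prime above 110% of the used ID count (PrimeFinder is a prime-lookup utility from the project's dependencies). The grow-and-copy move in isolation, with a hard-coded size:

import java.util.BitSet;

public class GrowDemo {
    public static void main(String[] args) {
        BitSet freeIds = new BitSet(128);
        freeIds.set(5);
        freeIds.set(100);
        BitSet bigger = new BitSet(1024); // larger capacity
        bigger.or(freeIds);               // bulk-copies every set bit
        System.out.println(bigger);                 // {5, 100}
        System.out.println(bigger.equals(freeIds)); // true: equals() ignores capacity
    }
}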
From source file:org.apache.hadoop.mapreduce.lib.input.TestMRKeyValueTextInputFormat.java
@Test
public void testFormat() throws Exception {
    Job job = Job.getInstance(new Configuration(defaultConf));
    Path file = new Path(workDir, "test.txt");

    int seed = new Random().nextInt();
    LOG.info("seed = " + seed);
    Random random = new Random(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int MAX_LENGTH = 10000;

    // for a variety of lengths
    for (int length = 0; length < MAX_LENGTH; length += random.nextInt(MAX_LENGTH / 10) + 1) {
        LOG.debug("creating; entries = " + length);

        // create a file with length entries
        Writer writer = new OutputStreamWriter(localFs.create(file));
        try {
            for (int i = 0; i < length; i++) {
                writer.write(Integer.toString(i * 2));
                writer.write("\t");
                writer.write(Integer.toString(i));
                writer.write("\n");
            }
        } finally {
            writer.close();
        }

        // try splitting the file in a variety of sizes
        KeyValueTextInputFormat format = new KeyValueTextInputFormat();
        for (int i = 0; i < 3; i++) {
            int numSplits = random.nextInt(MAX_LENGTH / 20) + 1;
            LOG.debug("splitting: requesting = " + numSplits);
            List<InputSplit> splits = format.getSplits(job);
            LOG.debug("splitting: got = " + splits.size());

            // check each split
            BitSet bits = new BitSet(length);
            for (int j = 0; j < splits.size(); j++) {
                LOG.debug("split[" + j + "]= " + splits.get(j));
                TaskAttemptContext context =
                        MapReduceTestUtil.createDummyMapTaskAttemptContext(job.getConfiguration());
                RecordReader<Text, Text> reader = format.createRecordReader(splits.get(j), context);
                Class<?> clazz = reader.getClass();
                assertEquals("reader class is KeyValueLineRecordReader.", KeyValueLineRecordReader.class,
                        clazz);
                MapContext<Text, Text, Text, Text> mcontext = new MapContextImpl<Text, Text, Text, Text>(
                        job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
                        MapReduceTestUtil.createDummyReporter(), splits.get(j));
                reader.initialize(splits.get(j), mcontext);

                Text key = null;
                Text value = null;
                try {
                    int count = 0;
                    while (reader.nextKeyValue()) {
                        key = reader.getCurrentKey();
                        clazz = key.getClass();
                        assertEquals("Key class is Text.", Text.class, clazz);
                        value = reader.getCurrentValue();
                        clazz = value.getClass();
                        assertEquals("Value class is Text.", Text.class, clazz);
                        final int k = Integer.parseInt(key.toString());
                        final int v = Integer.parseInt(value.toString());
                        assertEquals("Bad key", 0, k % 2);
                        assertEquals("Mismatched key/value", k / 2, v);
                        LOG.debug("read " + v);
                        assertFalse("Key in multiple partitions.", bits.get(v));
                        bits.set(v);
                        count++;
                    }
                    LOG.debug("splits[" + j + "]=" + splits.get(j) + " count=" + count);
                } finally {
                    reader.close();
                }
            }
            assertEquals("Some keys in no partition.", length, bits.cardinality());
        }
    }
}
From source file:edu.umich.flowfence.service.SandboxManager.java
private void dumpSandboxes() {
    if (localLOGV) {
        BitSet seenSandboxes = new BitSet(SANDBOX_COUNT);
        Log.v(TAG, ">>> Dumping current sandbox state:");
        Log.v(TAG, "Running: " + mRunningSandboxes.size() + " sandboxes");
        for (Sandbox sb : mRunningSandboxes) {
            dumpSandbox(sb, seenSandboxes);
        }
        Log.v(TAG, "Idle: " + mIdleSandboxes.size() + " sandboxes (LRU order)");
        for (Sandbox sb : mIdleSandboxes.keySet()) {
            dumpSandbox(sb, seenSandboxes);
        }
        Log.v(TAG, "Stopped: " + mStoppedSandboxes.size() + " sandboxes");
        for (Sandbox sb : mStoppedSandboxes) {
            dumpSandbox(sb, seenSandboxes);
        }
        Log.v(TAG, "Hot spares: " + mHotSpares.size() + " sandboxes");
        for (Sandbox sb : mHotSpares) {
            dumpSandbox(sb, seenSandboxes);
        }
        seenSandboxes.flip(0, SANDBOX_COUNT); // true = unseen
        if (!seenSandboxes.isEmpty()) {
            Log.w(TAG, "WARNING: leaked " + seenSandboxes.cardinality() + " sandboxes");
            int leaked = -1;
            while ((leaked = seenSandboxes.nextSetBit(leaked + 1)) >= 0) {
                dumpSandbox(Sandbox.get(leaked), null);
            }
        } else {
            Log.v(TAG, "No leaks detected");
        }
        Log.v(TAG, "<<< End of state dump");
    }
}
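The leak check at the end combines two standard BitSet moves: flip(0, n) inverts "seen" into "unseen", and the nextSetBit loop is the canonical way to walk every set bit. The same two moves in isolation (sandbox numbers invented):

import java.util.BitSet;

public class LeakCheckIdiom {
    public static void main(String[] args) {
        final int count = 8;
        BitSet seen = new BitSet(count);
        for (int i : new int[] { 0, 1, 3, 4, 6, 7 }) {
            seen.set(i);
        }
        seen.flip(0, count); // true = unseen
        if (!seen.isEmpty()) {
            System.out.println("leaked " + seen.cardinality() + " sandboxes");
            // nextSetBit returns -1 once no set bit remains
            for (int leaked = seen.nextSetBit(0); leaked >= 0; leaked = seen.nextSetBit(leaked + 1)) {
                System.out.println("leaked sandbox #" + leaked); // prints #2 and #5
            }
        }
    }
}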