Example usage for java.util.BitSet BitSet(int nbits)

Introduction

On this page you can find usage examples for the java.util.BitSet constructor.

Prototype

public BitSet(int nbits)

Document

Creates a bit set whose initial size is large enough to explicitly represent bits with indices in the range 0 through nbits-1. All bits are initially false.
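
All of the examples below call this constructor. As a quick orientation, here is a minimal, self-contained sketch of it in isolation (the size and indices are illustrative, not taken from the examples):

import java.util.BitSet;

public class BitSetConstructorDemo {
    public static void main(String[] args) {
        // The argument is only a sizing hint: the set starts all-false and
        // grows automatically if a bit beyond the initial range is set.
        BitSet bits = new BitSet(16);
        bits.set(3);
        bits.set(7);
        bits.set(3, false); // equivalent to bits.clear(3)
        System.out.println(bits);               // {7}
        System.out.println(bits.cardinality()); // 1
        System.out.println(bits.get(40));       // false; reading past the hint is safe
    }
}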

Usage

From source file:fingerprints.HashedBloomFingerprinter.java
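This example ORs the bits of a Bloom filter into a BitSet sized to the fingerprint length, then adds ring-membership bits derived with the CDK ring finders.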

private BitSet generateFingerprint(IAtomContainer container) {
    BitSet walkBits = bloomFilter.toBitSet();
    BitSet result = new BitSet(getFingerprintLength());
    result.or(walkBits);
    if (isRespectRingMatches()) {
        IRingSet rings = new RingSet();
        IRingSet allRings;
        try {
            allRings = arf.findAllRings(container);
            rings.add(allRings);
        } catch (CDKException e) {
            logger.debug(e.toString());
        }

        // sets SSSR information
        SSSRFinder finder = new SSSRFinder(container);
        IRingSet sssr = finder.findEssentialRings();
        rings.add(sssr);
        RingSetManipulator.markAromaticRings(rings);
        RingSetManipulator.sort(rings);
        setRingBits(result, rings);
    }
    bloomFilter.clear();
    return result;
}

From source file:org.apache.hadoop.mapreduce.lib.input.TestMRKeyValueTextInputFormat.java
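This Hadoop test writes a BZip2-compressed key/value file, splits it in a variety of ways, and uses a BitSet with one bit per expected record to assert that every value is read from exactly one split and that no key is missed.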

@Test
public void testSplitableCodecs() throws Exception {
    final Job job = Job.getInstance(defaultConf);
    final Configuration conf = job.getConfiguration();

    // Create the codec
    CompressionCodec codec = null;
    try {
        codec = (CompressionCodec) ReflectionUtils
                .newInstance(conf.getClassByName("org.apache.hadoop.io.compress.BZip2Codec"), conf);
    } catch (ClassNotFoundException cnfe) {
        throw new IOException("Illegal codec!");
    }
    Path file = new Path(workDir, "test" + codec.getDefaultExtension());

    int seed = new Random().nextInt();
    LOG.info("seed = " + seed);
    Random random = new Random(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int MAX_LENGTH = 500000;
    FileInputFormat.setMaxInputSplitSize(job, MAX_LENGTH / 20);
    // for a variety of lengths
    for (int length = 0; length < MAX_LENGTH; length += random.nextInt(MAX_LENGTH / 4) + 1) {

        LOG.info("creating; entries = " + length);

        // create a file with length entries
        Writer writer = new OutputStreamWriter(codec.createOutputStream(localFs.create(file)));
        try {
            for (int i = 0; i < length; i++) {
                writer.write(Integer.toString(i * 2));
                writer.write("\t");
                writer.write(Integer.toString(i));
                writer.write("\n");
            }
        } finally {
            writer.close();
        }

        // try splitting the file in a variety of sizes
        KeyValueTextInputFormat format = new KeyValueTextInputFormat();
        assertTrue("KVTIF claims not splittable", format.isSplitable(job, file));
        for (int i = 0; i < 3; i++) {
            int numSplits = random.nextInt(MAX_LENGTH / 2000) + 1;
            LOG.info("splitting: requesting = " + numSplits);
            List<InputSplit> splits = format.getSplits(job);
            LOG.info("splitting: got =        " + splits.size());

            // check each split
            BitSet bits = new BitSet(length);
            for (int j = 0; j < splits.size(); j++) {
                LOG.debug("split[" + j + "]= " + splits.get(j));
                TaskAttemptContext context = MapReduceTestUtil
                        .createDummyMapTaskAttemptContext(job.getConfiguration());
                RecordReader<Text, Text> reader = format.createRecordReader(splits.get(j), context);
                Class<?> clazz = reader.getClass();
                MapContext<Text, Text, Text, Text> mcontext = new MapContextImpl<Text, Text, Text, Text>(
                        job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
                        MapReduceTestUtil.createDummyReporter(), splits.get(j));
                reader.initialize(splits.get(j), mcontext);

                Text key = null;
                Text value = null;
                try {
                    int count = 0;
                    while (reader.nextKeyValue()) {
                        key = reader.getCurrentKey();
                        value = reader.getCurrentValue();
                        final int k = Integer.parseInt(key.toString());
                        final int v = Integer.parseInt(value.toString());
                        assertEquals("Bad key", 0, k % 2);
                        assertEquals("Mismatched key/value", k / 2, v);
                        LOG.debug("read " + k + "," + v);
                        assertFalse(k + "," + v + " in multiple partitions.", bits.get(v));
                        bits.set(v);
                        count++;
                    }
                    if (count > 0) {
                        LOG.info("splits[" + j + "]=" + splits.get(j) + " count=" + count);
                    } else {
                        LOG.debug("splits[" + j + "]=" + splits.get(j) + " count=" + count);
                    }
                } finally {
                    reader.close();
                }
            }
            assertEquals("Some keys in no partition.", length, bits.cardinality());
        }

    }
}

From source file:org.osgp.adapter.protocol.dlms.domain.commands.SetAlarmNotificationsCommandExecutor.java
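Here the BitSet acts as an alarm-filter bit field: each enabled alarm type sets the bit at its register index, and toLongArray()[0] packs the result into a long. Note that toLongArray() returns an empty array when no bit is set, so the [0] access assumes at least one enabled notification.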

public long alarmFilterLongValue(final AlarmNotifications alarmNotifications) {

    final BitSet bitSet = new BitSet(NUMBER_OF_BITS_IN_ALARM_FILTER);
    final Set<AlarmNotification> notifications = alarmNotifications.getAlarmNotifications();
    for (final AlarmNotification alarmNotification : notifications) {
        bitSet.set(alarmRegisterBitIndexPerAlarmType.get(alarmNotification.getAlarmType()),
                alarmNotification.isEnabled());
    }

    return bitSet.toLongArray()[0];
}

From source file:net.solarnetwork.node.control.sma.pcm.ModbusPCMController.java
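Four bits of the desired PCM value are unpacked into a BitSet, which is then written to four discrete Modbus output addresses.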

private synchronized boolean setPCMStatus(Integer desiredValue) {
    final BitSet bits = new BitSet(4);
    final int v = desiredValue;
    for (int i = 0; i < 4; i++) {
        bits.set(i, ((v >> i) & 1) == 1);
    }
    log.info("Setting PCM status to {} ({}%)", desiredValue, percentValueForIntegerValue(desiredValue));
    final Integer[] addresses = new Integer[] { d1Address, d2Address, d3Address, d4Address };
    try {
        return performAction(new ModbusConnectionAction<Boolean>() {

            @Override
            public Boolean doWithConnection(ModbusConnection conn) throws IOException {
                return conn.writeDiscreetValues(addresses, bits);
            }
        });
    } catch (IOException e) {
        log.error("Error communicating with PCM: {}", e.getMessage());
    }
    return false;
}

From source file:org.apache.openjpa.datacache.AbstractDataCache.java
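One bit per key: bit i is set when the i-th key is successfully pinned in the cache.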

public BitSet pinAll(Collection<Object> keys) {
    if (keys.isEmpty())
        return EMPTY_BITSET;

    BitSet set = new BitSet(keys.size());
    int i = 0;
    for (Iterator<Object> iter = keys.iterator(); iter.hasNext(); i++)
        if (pin(iter.next()))
            set.set(i);
    return set;
}

From source file:edu.brown.hstore.dtxn.LocalTransaction.java
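The constructor pre-sizes done_partitions with one bit per catalog partition so the transaction can track which partitions have finished.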

/**
 * Constructor. This does not fully initialize this transaction;
 * you must call init() before it can be used.
 */
public LocalTransaction(HStoreSite hstore_site) {
    super(hstore_site);

    HStoreConf hstore_conf = hstore_site.getHStoreConf();
    this.profiler = (hstore_conf.site.txn_profiling ? new TransactionProfile() : null);

    this.itask = new InitiateTaskMessage();

    int num_partitions = CatalogUtil.getNumberOfPartitions(hstore_site.getSite());
    this.done_partitions = new BitSet(num_partitions);
    //        this.exec_touchedPartitions = new FastIntHistogram(num_partitions);
}

From source file:org.apache.openjpa.datacache.AbstractDataCache.java
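The counterpart of pinAll(...) above: bit i is set when the i-th key is successfully unpinned.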

public BitSet unpinAll(Collection<Object> keys) {
    if (keys.isEmpty())
        return EMPTY_BITSET;

    BitSet set = new BitSet(keys.size());
    int i = 0;
    for (Iterator<Object> iter = keys.iterator(); iter.hasNext(); i++)
        if (unpin(iter.next()))
            set.set(i);
    return set;
}

From source file:com.netspective.commons.acl.AccessControlListTest.java
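This ACL test builds an expected permission set with a capacity hint of 11 and compares it to the role's actual BitSet; BitSet equality depends only on which bits are set, not on the constructor's size hint.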

/**
 * This test makes sure that an ACL is read properly from a file containing just a single <access-control-list>
 * tag with the name "main".
 */
public void testSingleACLNonDefaultDataModelSchemaImportFromXmlValid() throws RoleNotFoundException,
        PermissionNotFoundException, DataModelException, InvocationTargetException, NoSuchMethodException,
        InstantiationException, IllegalAccessException, IOException {
    AccessControlListsComponent aclc = (AccessControlListsComponent) XdmComponentFactory.get(
            AccessControlListsComponent.class, new Resource(AccessControlListTest.class, RESOURCE_NAME_TWO),
            XdmComponentFactory.XDMCOMPFLAGS_DEFAULT);

    // Verify _something_ was loaded...
    assertNotNull(aclc);

    // Verify exactly _one_ ACL was loaded...
    AccessControlListsManager aclm = aclc.getItems();
    assertNotNull("Expected: AccessControlListsManager object, Found: null", aclm);
    AccessControlLists acls = aclm.getAccessControlLists();
    Integer expectedNumACLs = new Integer(1);
    assertNotNull("Expected: AccessControlLists object, Found: null", acls);
    assertEquals("Expected: " + expectedNumACLs + " ACL, Found: " + acls.size(), expectedNumACLs.intValue(),
            acls.size());

    // Verify there is no defaultAcl and that it returns a null
    AccessControlList defaultAcl = aclm.getDefaultAccessControlList();
    assertNull("Expected: Default ACL = null, Found: Non-Null ACL", defaultAcl);

    AccessControlList mainAcl = aclm.getAccessControlList("main");
    assertNotNull("Expected: ACL named 'main', Found: null", mainAcl);
    assertEquals("Expected: ACL named 'main', Found: ACL named '" + mainAcl.getName() + "'", "main",
            mainAcl.getName());

    // Verify exactly _eight_ permissions were loaded in this acl
    Map aclPermissions = mainAcl.getPermissionsByName();
    assertNotNull("Expected: List of Permissions for ACL, Found: null", aclPermissions);
    assertEquals("Expected: Total permissions = 8, Found: Total permissions = " + aclPermissions.size(), 8,
            aclPermissions.size());

    // Verify exactly _three_ roles were loaded in this acl
    Map aclRoles = mainAcl.getRolesByName();
    assertNotNull(aclRoles);
    assertEquals(3, aclRoles.size());

    // Verify the /main/app/orders/edit_order permission has the right values
    Permission editOrder = mainAcl.getPermission("/main/app/orders/edit_order");
    assertEquals(4, editOrder.getId());
    assertEquals(2, editOrder.getAncestorsCount());
    assertEquals("    /main/app/orders/edit_order = 4 {4}\n", editOrder.toString());

    // Verify the /main/role/normal-user has the proper id
    Role normalUser = mainAcl.getRole("/main/role/normal-user");
    assertEquals("Expected: Id for /main/role/normal-user = 2, Found: " + normalUser.getId(), 2,
            normalUser.getId());
    assertEquals(1, normalUser.getAncestorsCount());
    assertEquals("  /main/role/normal-user = 2 {1, 2, 3, 4, 5}\n", normalUser.toString());

    // Verify the set of permissions for /main/role/normal-user are exactly what we expect
    BitSet normalUserPermissionSet = normalUser.getPermissions();
    BitSet expectedPermissionSet = new BitSet(11);
    expectedPermissionSet.set(1);
    expectedPermissionSet.set(2);
    expectedPermissionSet.set(3);
    expectedPermissionSet.set(4);
    expectedPermissionSet.set(5);
    assertEquals("Expected: Permissions for /main/role/normal-user = " + expectedPermissionSet + ", Found: "
            + normalUserPermissionSet, expectedPermissionSet, normalUserPermissionSet);

    aclc.printErrorsAndWarnings();
}

From source file:de.uniba.wiai.lspi.chord.data.ID.java
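The simplest usage here: the private ID constructor allocates an all-false bit set of a fixed total bit length.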

private ID() {
    this.id = new BitSet(kTotalBitLen);
}

From source file:org.apache.tez.runtime.library.common.writers.TestUnorderedPartitionedKVWriter.java
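This Tez test uses two BitSets: partitionsWithData marks the partitions that received at least one record, and emptyPartitionBits (decoded from the data-movement event payload) is cross-checked against it while the spilled records are verified.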

public void textTest(int numRegularRecords, int numPartitions, long availableMemory, int numLargeKeys,
        int numLargevalues, int numLargeKvPairs) throws IOException, InterruptedException {
    Partitioner partitioner = new HashPartitioner();
    ApplicationId appId = ApplicationId.newInstance(10000, 1);
    TezCounters counters = new TezCounters();
    String uniqueId = UUID.randomUUID().toString();
    OutputContext outputContext = createMockOutputContext(counters, appId, uniqueId);
    Random random = new Random();

    Configuration conf = createConfiguration(outputContext, Text.class, Text.class, shouldCompress, -1,
            HashPartitioner.class);
    CompressionCodec codec = null;
    if (shouldCompress) {
        codec = new DefaultCodec();
        ((Configurable) codec).setConf(conf);
    }

    int numRecordsWritten = 0;

    Map<Integer, Multimap<String, String>> expectedValues = new HashMap<Integer, Multimap<String, String>>();
    for (int i = 0; i < numPartitions; i++) {
        expectedValues.put(i, LinkedListMultimap.<String, String>create());
    }

    UnorderedPartitionedKVWriter kvWriter = new UnorderedPartitionedKVWriterForTest(outputContext, conf,
            numPartitions, availableMemory);

    int sizePerBuffer = kvWriter.sizePerBuffer;

    BitSet partitionsWithData = new BitSet(numPartitions);
    Text keyText = new Text();
    Text valText = new Text();
    for (int i = 0; i < numRegularRecords; i++) {
        String key = createRandomString(Math.abs(random.nextInt(10)));
        String val = createRandomString(Math.abs(random.nextInt(20)));
        keyText.set(key);
        valText.set(val);
        int partition = partitioner.getPartition(keyText, valText, numPartitions);
        partitionsWithData.set(partition);
        expectedValues.get(partition).put(key, val);
        kvWriter.write(keyText, valText);
        numRecordsWritten++;
    }

    // Write Large key records
    for (int i = 0; i < numLargeKeys; i++) {
        String key = createRandomString(sizePerBuffer + Math.abs(random.nextInt(100)));
        String val = createRandomString(Math.abs(random.nextInt(20)));
        keyText.set(key);
        valText.set(val);
        int partition = partitioner.getPartition(keyText, valText, numPartitions);
        partitionsWithData.set(partition);
        expectedValues.get(partition).put(key, val);
        kvWriter.write(keyText, valText);
        numRecordsWritten++;
    }

    // Write Large val records
    for (int i = 0; i < numLargevalues; i++) {
        String key = createRandomString(Math.abs(random.nextInt(10)));
        String val = createRandomString(sizePerBuffer + Math.abs(random.nextInt(100)));
        keyText.set(key);
        valText.set(val);
        int partition = partitioner.getPartition(keyText, valText, numPartitions);
        partitionsWithData.set(partition);
        expectedValues.get(partition).put(key, val);
        kvWriter.write(keyText, valText);
        numRecordsWritten++;
    }

    // Write records where key + val are large (but both can fit in the buffer individually)
    for (int i = 0; i < numLargeKvPairs; i++) {
        String key = createRandomString(sizePerBuffer / 2 + Math.abs(random.nextInt(100)));
        String val = createRandomString(sizePerBuffer / 2 + Math.abs(random.nextInt(100)));
        keyText.set(key);
        valText.set(val);
        int partition = partitioner.getPartition(keyText, valText, numPartitions);
        partitionsWithData.set(partition);
        expectedValues.get(partition).put(key, val);
        kvWriter.write(keyText, valText);
        numRecordsWritten++;
    }

    List<Event> events = kvWriter.close();
    verify(outputContext, never()).fatalError(any(Throwable.class), any(String.class));

    TezCounter outputLargeRecordsCounter = counters.findCounter(TaskCounter.OUTPUT_LARGE_RECORDS);
    assertEquals(numLargeKeys + numLargevalues + numLargeKvPairs, outputLargeRecordsCounter.getValue());

    // Validate the event
    assertEquals(1, events.size());
    assertTrue(events.get(0) instanceof CompositeDataMovementEvent);
    CompositeDataMovementEvent cdme = (CompositeDataMovementEvent) events.get(0);
    assertEquals(0, cdme.getSourceIndexStart());
    assertEquals(numPartitions, cdme.getCount());
    DataMovementEventPayloadProto eventProto = DataMovementEventPayloadProto
            .parseFrom(ByteString.copyFrom(cdme.getUserPayload()));
    assertFalse(eventProto.hasData());
    BitSet emptyPartitionBits = null;
    if (partitionsWithData.cardinality() != numPartitions) {
        assertTrue(eventProto.hasEmptyPartitions());
        byte[] emptyPartitions = TezCommonUtils
                .decompressByteStringToByteArray(eventProto.getEmptyPartitions());
        emptyPartitionBits = TezUtilsInternal.fromByteArray(emptyPartitions);
        assertEquals(numPartitions - partitionsWithData.cardinality(), emptyPartitionBits.cardinality());
    } else {
        assertFalse(eventProto.hasEmptyPartitions());
        emptyPartitionBits = new BitSet(numPartitions);
    }
    assertEquals(HOST_STRING, eventProto.getHost());
    assertEquals(SHUFFLE_PORT, eventProto.getPort());
    assertEquals(uniqueId, eventProto.getPathComponent());

    // Verify the actual data
    TezTaskOutput taskOutput = new TezTaskOutputFiles(conf, uniqueId);
    Path outputFilePath = null;
    Path spillFilePath = null;
    try {
        outputFilePath = taskOutput.getOutputFile();
    } catch (DiskErrorException e) {
        if (numRecordsWritten > 0) {
            fail();
        } else {
            // Record checking not required.
            return;
        }
    }
    try {
        spillFilePath = taskOutput.getOutputIndexFile();
    } catch (DiskErrorException e) {
        if (numRecordsWritten > 0) {
            fail();
        } else {
            // Record checking not required.
            return;
        }
    }

    // Special case for 0 records.
    TezSpillRecord spillRecord = new TezSpillRecord(spillFilePath, conf);
    DataInputBuffer keyBuffer = new DataInputBuffer();
    DataInputBuffer valBuffer = new DataInputBuffer();
    Text keyDeser = new Text();
    Text valDeser = new Text();
    for (int i = 0; i < numPartitions; i++) {
        if (emptyPartitionBits.get(i)) {
            continue;
        }
        TezIndexRecord indexRecord = spillRecord.getIndex(i);
        FSDataInputStream inStream = FileSystem.getLocal(conf).open(outputFilePath);
        inStream.seek(indexRecord.getStartOffset());
        IFile.Reader reader = new IFile.Reader(inStream, indexRecord.getPartLength(), codec, null, null, false,
                0, -1);
        while (reader.nextRawKey(keyBuffer)) {
            reader.nextRawValue(valBuffer);
            keyDeser.readFields(keyBuffer);
            valDeser.readFields(valBuffer);
            int partition = partitioner.getPartition(keyDeser, valDeser, numPartitions);
            assertTrue(expectedValues.get(partition).remove(keyDeser.toString(), valDeser.toString()));
        }
        inStream.close();
    }
    for (int i = 0; i < numPartitions; i++) {
        assertEquals(0, expectedValues.get(i).size());
        expectedValues.remove(i);
    }
    assertEquals(0, expectedValues.size());
}