List of usage examples for java.util.BitSet BitSet()
public BitSet()
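The no-argument constructor creates an empty bit set in which every bit is initially false, and the set grows automatically as higher indices are set. As a quick orientation before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) of the default-constructed BitSet:

import java.util.BitSet;

public class BitSetDefaultConstructorDemo {
    public static void main(String[] args) {
        BitSet bits = new BitSet();              // no initial size given; all bits start as false
        System.out.println(bits.length());       // 0  -> no bit is set yet
        System.out.println(bits.size());         // 64 on the reference implementation: one backing long word
        bits.set(200);                           // storage grows automatically to hold bit 200
        System.out.println(bits.length());       // 201 -> index of the highest set bit, plus one
        System.out.println(bits.cardinality());  // 1  -> number of bits that are set
    }
}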
From source file:com.p2p.peercds.client.SharedTorrent.java
/**
 * Create a new shared torrent from meta-info binary data.
 *
 * @param torrent The meta-info byte data.
 * @param parent The parent directory or location of the torrent files.
 * @param seeder Whether we're a seeder for this torrent or not (disables
 * validation).
 * @throws FileNotFoundException If the torrent file location or
 * destination directory does not exist and can't be created.
 * @throws IOException If the torrent file cannot be read or decoded.
 */
public SharedTorrent(byte[] torrent, File parent, boolean seeder) throws FileNotFoundException, IOException {
    super(torrent, seeder);

    if (parent == null || !parent.isDirectory()) {
        throw new IllegalArgumentException("Invalid parent directory!");
    }

    String parentPath = parent.getCanonicalPath();

    try {
        this.pieceLength = this.decoded_info.get("piece length").getInt();
        this.piecesHashes = ByteBuffer.wrap(this.decoded_info.get("pieces").getBytes());

        if (this.piecesHashes.capacity() / PIECE_HASH_SIZE * (long) this.pieceLength < this.getSize()) {
            throw new IllegalArgumentException(
                    "Torrent size does not " + "match the number of pieces and the piece size!");
        }
    } catch (InvalidBEncodingException ibee) {
        throw new IllegalArgumentException("Error reading torrent meta-info fields!");
    }

    List<FileStorage> files = new LinkedList<FileStorage>();
    long offset = 0L;
    for (Torrent.TorrentFile file : this.files) {
        File actual = new File(parent, file.file.getPath());
        if (!actual.getCanonicalPath().startsWith(parentPath)) {
            throw new SecurityException("Torrent file path attempted " + "to break directory jail!");
        }
        actual.getParentFile().mkdirs();
        files.add(new FileStorage(actual, offset, file.size));
        offset += file.size;
    }

    this.bucket = new FileCollectionStorage(files, this.getSize());

    this.random = new Random(System.currentTimeMillis());
    this.stop = false;

    this.uploaded = 0;
    this.downloaded = 0;
    this.numBytesFetchedFromCloud = 0;
    this.lastCloudFetchTime = 0;
    this.left = this.getSize();

    this.initialized = false;
    this.pieces = new Piece[0];
    this.rarest = Collections.synchronizedSortedSet(new TreeSet<Piece>());
    this.completedPieces = new BitSet();
    this.requestedPieces = new BitSet();
}
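The two default-constructed bit sets at the end are used by the class to track pieces by index (completed vs. currently requested). A minimal, hypothetical sketch of the same bookkeeping idea, with names and the piece count invented for illustration:

import java.util.BitSet;

public class PieceTracker {
    private final int pieceCount;
    private final BitSet completedPieces = new BitSet();  // grows up to pieceCount bits as pieces finish
    private final BitSet requestedPieces = new BitSet();  // pieces currently requested from peers

    public PieceTracker(int pieceCount) {
        this.pieceCount = pieceCount;
    }

    public void markCompleted(int pieceIndex) {
        completedPieces.set(pieceIndex);
        requestedPieces.clear(pieceIndex);  // a finished piece no longer needs to be requested
    }

    public float completionPercent() {
        return 100f * completedPieces.cardinality() / pieceCount;
    }
}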
From source file:org.apache.jackrabbit.core.query.lucene.JahiaLuceneQueryFactoryImpl.java
/**
 * Override LuceneQueryFactory.execute()
 */
@Override
public List<Row> execute(Map<String, PropertyValue> columns, Selector selector, Constraint constraint,
        Sort sort, boolean externalSort, long offsetIn, long limitIn) throws RepositoryException, IOException {
    final IndexReader reader = index.getIndexReader(true);
    final int offset = offsetIn < 0 ? 0 : (int) offsetIn;
    final int limit = limitIn < 0 ? Integer.MAX_VALUE : (int) limitIn;
    QueryHits hits = null;
    try {
        JackrabbitIndexSearcher searcher = new JackrabbitIndexSearcher(session, reader,
                index.getContext().getItemStateManager());
        searcher.setSimilarity(index.getSimilarity());
        Predicate filter = Predicate.TRUE;
        BooleanQuery query = new BooleanQuery();
        QueryPair qp = new QueryPair(query);
        query.add(create(selector), MUST);
        if (constraint != null) {
            String name = selector.getSelectorName();
            NodeType type = ntManager.getNodeType(selector.getNodeTypeName());
            filter = mapConstraintToQueryAndFilter(qp, constraint, Collections.singletonMap(name, type),
                    searcher, reader);
        }

        // Added by jahia
        Set<String> foundIds = new HashSet<String>();
        int hasFacets = FacetHandler.hasFacetFunctions(columns, session);
        CountHandler.CountType countType = CountHandler.hasCountFunction(columns, session);
        boolean isCount = countType != null;
        BitSet bitset = (hasFacets & FacetHandler.FACET_COLUMNS) == 0 ? null : new BitSet();
        // End

        List<Row> rowList = externalSort ? new LinkedList<Row>() : null;
        Map<String, Row> rows = externalSort ? null : new LinkedHashMap<String, Row>();
        hits = searcher.evaluate(qp.mainQuery, sort, offset + limit);
        int currentNode = 0;
        int addedNodes = 0;
        int resultCount = 0;
        int hitsSize = 0;
        ScoreNode node = hits.nextScoreNode();
        Map<String, Boolean> checkedAcls = new HashMap<String, Boolean>();

        while (node != null) {
            if (isCount && countType.isApproxCount()) {
                hitsSize++;
                if (hitsSize > countType.getApproxCountLimit()) {
                    if (hits.getSize() > 0) {
                        hitsSize = hits.getSize();
                        break;
                    } else {
                        node = hits.nextScoreNode();
                        continue;
                    }
                }
            }
            IndexedNodeInfo infos = getIndexedNodeInfo(node, reader, isCount && countType.isSkipChecks());
            if (foundIds.add(infos.getMainNodeUuid())) { // <-- Added by jahia
                if (isCount && countType.isSkipChecks()) {
                    resultCount++;
                } else {
                    try {
                        boolean canRead = true;
                        if (isAclUuidInIndex()) {
                            canRead = checkIndexedAcl(checkedAcls, infos);
                        }
                        boolean checkVisibility = "1".equals(infos.getCheckVisibility())
                                && Constants.LIVE_WORKSPACE.equals(session.getWorkspace().getName());

                        if (canRead
                                && (!Constants.LIVE_WORKSPACE.equals(session.getWorkspace().getName())
                                        || ((infos.getPublished() == null || "true".equals(infos.getPublished()))
                                                && (infos.getCheckInvalidLanguages() == null
                                                        || getLocale() == null
                                                        || !infos.getCheckInvalidLanguages()
                                                                .contains(getLocale().toString()))))) {
                            if (filter == Predicate.TRUE) { // <-- Added by jahia
                                if ((hasFacets & FacetHandler.ONLY_FACET_COLUMNS) == 0) {
                                    Row row = null;
                                    if (checkVisibility || !isAclUuidInIndex()) {
                                        NodeImpl objectNode = getNodeWithAclAndVisibilityCheck(node,
                                                checkVisibility);
                                        if (isCount) {
                                            resultCount++;
                                        } else {
                                            row = new LazySelectorRow(columns, evaluator,
                                                    selector.getSelectorName(), objectNode, node.getScore());
                                        }
                                    } else {
                                        if (isCount) {
                                            resultCount++;
                                        } else {
                                            row = new LazySelectorRow(columns, evaluator,
                                                    selector.getSelectorName(), node.getNodeId(),
                                                    node.getScore());
                                        }
                                    }

                                    if (row == null) {
                                        continue;
                                    }

                                    if (externalSort) {
                                        rowList.add(row);
                                    } else {
                                        // apply limit and offset rules locally
                                        if (currentNode >= offset && currentNode - offset < limit) {
                                            rows.put(node.getNodeId().toString(), row);
                                            addedNodes++;
                                        }
                                        currentNode++;
                                        // end the loop when going over the limit
                                        if (addedNodes == limit) {
                                            break;
                                        }
                                    }
                                }
                                if ((hasFacets & FacetHandler.FACET_COLUMNS) == FacetHandler.FACET_COLUMNS) {
                                    // Added by Jahia
                                    // can be added to bitset when ACL checked and not in live mode or no
                                    // visibility rule to check
                                    if (isAclUuidInIndex() && !checkVisibility) {
                                        bitset.set(infos.getDocNumber());
                                    } else {
                                        // try to load nodeWrapper to check the visibility rules
                                        NodeImpl objectNode = getNodeWithAclAndVisibilityCheck(node,
                                                checkVisibility);
                                        bitset.set(infos.getDocNumber());
                                    }
                                    // !Added by Jahia
                                }
                            } else {
                                NodeImpl objectNode = session.getNodeById(node.getNodeId());
                                if (objectNode.isNodeType("jnt:translation")) {
                                    objectNode = (NodeImpl) objectNode.getParent();
                                }
                                if (isCount) {
                                    resultCount++;
                                } else {
                                    Row row = new SelectorRow(columns, evaluator, selector.getSelectorName(),
                                            objectNode, node.getScore());
                                    if (filter.evaluate(row)) {
                                        if ((hasFacets & FacetHandler.ONLY_FACET_COLUMNS) == 0) {
                                            if (externalSort) {
                                                rowList.add(row);
                                            } else {
                                                // apply limit and offset rules locally
                                                if (currentNode >= offset && currentNode - offset < limit) {
                                                    rows.put(node.getNodeId().toString(), row);
                                                    addedNodes++;
                                                }
                                                currentNode++;
                                                // end the loop when going over the limit
                                                if (addedNodes == limit) {
                                                    break;
                                                }
                                            }
                                        }
                                        if ((hasFacets & FacetHandler.FACET_COLUMNS) == FacetHandler.FACET_COLUMNS) {
                                            bitset.set(infos.getDocNumber()); // <-- Added by jahia
                                        }
                                    }
                                }
                            }
                        }
                    } catch (PathNotFoundException e) {
                    } catch (ItemNotFoundException e) {
                        // skip the node
                    }
                }
            } else {
                if (((hasFacets & FacetHandler.ONLY_FACET_COLUMNS) == 0) && !isCount && !externalSort
                        && !infos.getMainNodeUuid().equals(node.getNodeId().toString())
                        && rows.containsKey(infos.getMainNodeUuid())) {
                    // we've got the translation node -> adjusting the position of the original node
                    // in the result list
                    rows.put(infos.getMainNodeUuid(), rows.remove(infos.getMainNodeUuid()));
                }
            } // <-- Added by jahia
            node = hits.nextScoreNode();
        }

        if (rowList == null) {
            if (rows != null) {
                rowList = new LinkedList<Row>(rows.values());
            } else {
                rowList = new LinkedList<Row>();
            }
        }

        // Added by jahia
        if ((hasFacets & FacetHandler.FACET_COLUMNS) == FacetHandler.FACET_COLUMNS) {
            OpenBitSet docIdSet = new OpenBitSetDISI(new DocIdBitSet(bitset).iterator(), bitset.size());
            FacetHandler h = new FacetHandler(columns, selector, docIdSet, index, session, nsMappings);
            h.handleFacets(reader);
            rowList.add(0, h.getFacetsRow());
        } else if (isCount) {
            boolean wasApproxLimitReached = false;
            if (countType.isApproxCount() && hitsSize > countType.getApproxCountLimit()) {
                resultCount = hitsSize * resultCount / countType.getApproxCountLimit();
                resultCount = (int) Math.ceil(MathUtils.round(resultCount,
                        resultCount < 1000 ? -1 : (resultCount < 10000 ? -2 : -3), BigDecimal.ROUND_UP));
                wasApproxLimitReached = true;
            }
            rowList.add(0, CountHandler.createCountRow(resultCount, wasApproxLimitReached));
        }
        // End

        return rowList;
    } finally {
        if (hits != null) {
            hits.close();
        }
        Util.closeOrRelease(reader);
    }
}
From source file:au.org.ala.delta.translation.intkey.IntkeyCharactersFileWriter.java
protected void writeCharacterMask() {
    List<Boolean> includedCharacters = _encoder.encodeCharacterMasks(_dataSet, true);
    BitSet charMask = new BitSet();
    for (int i = 0; i < includedCharacters.size(); i++) {
        if (includedCharacters.get(i)) {
            charMask.set(i);
        }
    }
    _charsFile.writeCharacterMask(_dataSet.getNumberOfCharacters(), charMask);
}
From source file:org.apache.hyracks.control.cc.executor.ActivityClusterPlanner.java
private Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> computeTaskConnectivity(JobRun jobRun,
        Map<ActivityId, ActivityPlan> activityPlanMap, Set<ActivityId> activities) {
    Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> taskConnectivity = new HashMap<>();
    ActivityClusterGraph acg = jobRun.getActivityClusterGraph();
    BitSet targetBitmap = new BitSet();
    for (ActivityId ac1 : activities) {
        ActivityCluster ac = acg.getActivityMap().get(ac1);
        Task[] ac1TaskStates = activityPlanMap.get(ac1).getTasks();
        int nProducers = ac1TaskStates.length;
        List<IConnectorDescriptor> outputConns = ac.getActivityOutputMap().get(ac1);
        if (outputConns == null) {
            continue;
        }
        for (IConnectorDescriptor c : outputConns) {
            ConnectorDescriptorId cdId = c.getConnectorId();
            ActivityId ac2 = ac.getConsumerActivity(cdId);
            Task[] ac2TaskStates = activityPlanMap.get(ac2).getTasks();
            int nConsumers = ac2TaskStates.length;
            if (c.allProducersToAllConsumers()) {
                List<Pair<TaskId, ConnectorDescriptorId>> cInfoList = new ArrayList<>();
                for (int j = 0; j < nConsumers; j++) {
                    TaskId targetTID = ac2TaskStates[j].getTaskId();
                    cInfoList.add(Pair.of(targetTID, cdId));
                }
                for (int i = 0; i < nProducers; ++i) {
                    taskConnectivity.put(ac1TaskStates[i].getTaskId(), cInfoList);
                }
                continue;
            }
            for (int i = 0; i < nProducers; ++i) {
                c.indicateTargetPartitions(nProducers, nConsumers, i, targetBitmap);
                List<Pair<TaskId, ConnectorDescriptorId>> cInfoList = taskConnectivity
                        .get(ac1TaskStates[i].getTaskId());
                if (cInfoList == null) {
                    cInfoList = new ArrayList<>();
                    taskConnectivity.put(ac1TaskStates[i].getTaskId(), cInfoList);
                }
                for (int j = targetBitmap.nextSetBit(0); j >= 0; j = targetBitmap.nextSetBit(j + 1)) {
                    TaskId targetTID = ac2TaskStates[j].getTaskId();
                    cInfoList.add(Pair.of(targetTID, cdId));
                }
            }
        }
    }
    return taskConnectivity;
}
From source file:edu.uci.ics.hyracks.control.cc.scheduler.ActivityClusterPlanner.java
private Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> computeTaskConnectivity(JobRun jobRun,
        Map<ActivityId, ActivityPlan> activityPlanMap, Set<ActivityId> activities) {
    Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> taskConnectivity = new HashMap<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>>();
    ActivityClusterGraph acg = jobRun.getActivityClusterGraph();
    BitSet targetBitmap = new BitSet();
    for (ActivityId ac1 : activities) {
        ActivityCluster ac = acg.getActivityMap().get(ac1);
        Task[] ac1TaskStates = activityPlanMap.get(ac1).getTasks();
        int nProducers = ac1TaskStates.length;
        List<IConnectorDescriptor> outputConns = ac.getActivityOutputMap().get(ac1);
        if (outputConns != null) {
            for (IConnectorDescriptor c : outputConns) {
                ConnectorDescriptorId cdId = c.getConnectorId();
                ActivityId ac2 = ac.getConsumerActivity(cdId);
                Task[] ac2TaskStates = activityPlanMap.get(ac2).getTasks();
                int nConsumers = ac2TaskStates.length;
                if (c.allProducersToAllConsumers()) {
                    List<Pair<TaskId, ConnectorDescriptorId>> cInfoList = new ArrayList<Pair<TaskId, ConnectorDescriptorId>>();
                    for (int j = 0; j < nConsumers; j++) {
                        TaskId targetTID = ac2TaskStates[j].getTaskId();
                        cInfoList.add(Pair.<TaskId, ConnectorDescriptorId>of(targetTID, cdId));
                    }
                    for (int i = 0; i < nProducers; ++i) {
                        taskConnectivity.put(ac1TaskStates[i].getTaskId(), cInfoList);
                    }
                } else {
                    for (int i = 0; i < nProducers; ++i) {
                        c.indicateTargetPartitions(nProducers, nConsumers, i, targetBitmap);
                        List<Pair<TaskId, ConnectorDescriptorId>> cInfoList = taskConnectivity
                                .get(ac1TaskStates[i].getTaskId());
                        if (cInfoList == null) {
                            cInfoList = new ArrayList<Pair<TaskId, ConnectorDescriptorId>>();
                            taskConnectivity.put(ac1TaskStates[i].getTaskId(), cInfoList);
                        }
                        for (int j = targetBitmap.nextSetBit(0); j >= 0; j = targetBitmap.nextSetBit(j + 1)) {
                            TaskId targetTID = ac2TaskStates[j].getTaskId();
                            cInfoList.add(Pair.<TaskId, ConnectorDescriptorId>of(targetTID, cdId));
                        }
                    }
                }
            }
        }
    }
    return taskConnectivity;
}
From source file:org.apache.eagle.alert.engine.serialization.PartitionedEventSerializerTest.java
@Test
public void testBitSet() {
    BitSet bitSet = new BitSet();
    bitSet.set(0, true); // 1
    bitSet.set(1, false); // 0
    bitSet.set(2, true); // 1
    LOG.info("Bit Set Size: {}", bitSet.size());
    LOG.info("Bit Set Byte[]: {}", bitSet.toByteArray());
    LOG.info("Bit Set Long[]: {}", bitSet.toLongArray());
    LOG.info("BitSet[0]: {}", bitSet.get(0));
    LOG.info("BitSet[1]: {}", bitSet.get(1));
    LOG.info("BitSet[2]: {}", bitSet.get(2));

    byte[] bytes = bitSet.toByteArray();
    BitSet bitSet2 = BitSet.valueOf(bytes);
    LOG.info("Bit Set Size: {}", bitSet2.size());
    LOG.info("Bit Set Byte[]: {}", bitSet2.toByteArray());
    LOG.info("Bit Set Long[]: {}", bitSet2.toLongArray());
    LOG.info("BitSet[0]: {}", bitSet2.get(0));
    LOG.info("BitSet[1]: {}", bitSet2.get(1));
    LOG.info("BitSet[2]: {}", bitSet2.get(2));

    BitSet bitSet3 = new BitSet();
    bitSet3.set(0, true);
    Assert.assertEquals(1, bitSet3.length());

    BitSet bitSet4 = new BitSet();
    bitSet4.set(0, false);
    Assert.assertEquals(0, bitSet4.length());
    Assert.assertFalse(bitSet4.get(1));
    Assert.assertFalse(bitSet4.get(2));
}
From source file:org.apache.pig.tools.pigstats.mapreduce.MRScriptState.java
public String getPigFeature(MapReduceOper mro) {
    if (featureMap == null) {
        featureMap = new HashMap<MapReduceOper, String>();
    }

    String retStr = featureMap.get(mro);
    if (retStr == null) {
        BitSet feature = new BitSet();
        feature.clear();
        if (mro.isSkewedJoin()) {
            feature.set(PIG_FEATURE.SKEWED_JOIN.ordinal());
        }
        if (mro.isGlobalSort()) {
            feature.set(PIG_FEATURE.ORDER_BY.ordinal());
        }
        if (mro.isSampler()) {
            feature.set(PIG_FEATURE.SAMPLER.ordinal());
        }
        if (mro.isIndexer()) {
            feature.set(PIG_FEATURE.INDEXER.ordinal());
        }
        if (mro.isCogroup()) {
            feature.set(PIG_FEATURE.COGROUP.ordinal());
        }
        if (mro.isGroupBy()) {
            feature.set(PIG_FEATURE.GROUP_BY.ordinal());
        }
        if (mro.isRegularJoin()) {
            feature.set(PIG_FEATURE.HASH_JOIN.ordinal());
        }
        if (mro.needsDistinctCombiner()) {
            feature.set(PIG_FEATURE.DISTINCT.ordinal());
        }
        if (!mro.combinePlan.isEmpty()) {
            feature.set(PIG_FEATURE.COMBINER.ordinal());
        }
        if (mro instanceof NativeMapReduceOper) {
            feature.set(PIG_FEATURE.NATIVE.ordinal());
        } else { // if it is NATIVE MR, don't explore its plans
            try {
                new FeatureVisitor(mro.mapPlan, feature).visit();
                if (mro.reducePlan.isEmpty()) {
                    feature.set(PIG_FEATURE.MAP_ONLY.ordinal());
                } else {
                    new FeatureVisitor(mro.reducePlan, feature).visit();
                }
            } catch (VisitorException e) {
                LOG.warn("Feature visitor failed", e);
            }
        }
        StringBuilder sb = new StringBuilder();
        for (int i = feature.nextSetBit(0); i >= 0; i = feature.nextSetBit(i + 1)) {
            if (sb.length() > 0)
                sb.append(",");
            sb.append(PIG_FEATURE.values()[i].name());
        }
        retStr = sb.toString();
        featureMap.put(mro, retStr);
    }
    return retStr;
}
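Both this example and the Hyracks planners above walk the set bits with the standard nextSetBit(0) / nextSetBit(i + 1) loop, here over enum ordinals. A minimal, standalone sketch of the same enum-flag pattern (the Feature enum and its values are invented for illustration, not taken from Pig):

import java.util.BitSet;
import java.util.StringJoiner;

public class FeatureFlagsDemo {
    enum Feature { GROUP_BY, ORDER_BY, COMBINER }

    public static void main(String[] args) {
        BitSet flags = new BitSet();                 // one bit per enum ordinal
        flags.set(Feature.GROUP_BY.ordinal());
        flags.set(Feature.COMBINER.ordinal());

        StringJoiner joiner = new StringJoiner(",");
        // iterate only over the bits that are set
        for (int i = flags.nextSetBit(0); i >= 0; i = flags.nextSetBit(i + 1)) {
            joiner.add(Feature.values()[i].name());
        }
        System.out.println(joiner);                  // GROUP_BY,COMBINER
    }
}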
From source file:org.openecomp.sdnc.sli.aai.AAIRequest.java
protected String getRequestPath() throws MalformedURLException {
    Set<String> uniqueResources = extractUniqueResourceSetFromKeys(requestProperties.stringPropertyNames());
    BitSet bitset = new BitSet();
    for (String key : uniqueResources) {
        if (tagValues.containsKey(key)) {
            Object tmpValue = tagValues.get(key);
            if (tmpValue != null) {
                String value = tmpValue.toString();
                int bitIndex = Integer.parseInt(value);
                bitset.set(bitIndex);
            }
        }
    }

    String path = bitsetPaths.get(bitset);
    if (path == null) {
        throw new MalformedURLException(
                "PATH not found for key string containing values: " + requestProperties.toString());
    }
    return path;
}
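This example relies on BitSet being usable as a map key: equals() and hashCode() are defined over the set bits, so a freshly built bit set with the same bits set retrieves the entry stored under an equal key. A small standalone sketch of that lookup pattern (the path string is invented for illustration, not taken from the project):

import java.util.BitSet;
import java.util.HashMap;
import java.util.Map;

public class BitSetAsMapKeyDemo {
    public static void main(String[] args) {
        Map<BitSet, String> bitsetPaths = new HashMap<>();

        BitSet key = new BitSet();
        key.set(1);
        key.set(3);
        bitsetPaths.put(key, "/example/resource/{id}");  // illustrative path only

        // a different BitSet instance with the same bits set finds the same entry
        BitSet lookup = new BitSet();
        lookup.set(3);
        lookup.set(1);
        System.out.println(bitsetPaths.get(lookup));  // prints /example/resource/{id}
    }
}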
From source file:com.turn.ttorrent.client.SharedTorrent.java
/**
 * Create a new shared torrent from meta-info binary data.
 *
 * @param torrent The meta-info byte data.
 * @param parent The parent directory or location of the torrent files.
 * @param seeder Whether we're a seeder for this torrent or not (disables
 * validation).
 * @param requestStrategy The request strategy implementation.
 * @throws FileNotFoundException If the torrent file location or
 * destination directory does not exist and can't be created.
 * @throws IOException If the torrent file cannot be read or decoded.
 */
public SharedTorrent(byte[] torrent, File parent, boolean seeder, RequestStrategy requestStrategy)
        throws FileNotFoundException, IOException, NoSuchAlgorithmException {
    super(torrent, seeder);

    if (parent == null || !parent.isDirectory()) {
        throw new IllegalArgumentException("Invalid parent directory!");
    }

    String parentPath = parent.getCanonicalPath();

    try {
        this.pieceLength = this.decoded_info.get("piece length").getInt();
        this.piecesHashes = ByteBuffer.wrap(this.decoded_info.get("pieces").getBytes());

        if (this.piecesHashes.capacity() / Torrent.PIECE_HASH_SIZE * (long) this.pieceLength < this.getSize()) {
            throw new IllegalArgumentException(
                    "Torrent size does not " + "match the number of pieces and the piece size!");
        }
    } catch (InvalidBEncodingException ibee) {
        throw new IllegalArgumentException("Error reading torrent meta-info fields!");
    }

    List<FileStorage> files = new LinkedList<FileStorage>();
    long offset = 0L;
    for (Torrent.TorrentFile file : this.files) {
        File actual = new File(parent, file.file.getPath());
        if (!actual.getCanonicalPath().startsWith(parentPath)) {
            throw new SecurityException("Torrent file path attempted " + "to break directory jail!");
        }
        actual.getParentFile().mkdirs();
        files.add(new FileStorage(actual, offset, file.size));
        offset += file.size;
    }

    this.bucket = new FileCollectionStorage(files, this.getSize());

    this.stop = false;

    this.uploaded = 0;
    this.downloaded = 0;
    this.left = this.getSize();

    this.initialized = false;
    this.pieces = new Piece[0];
    this.rarest = Collections.synchronizedSortedSet(new TreeSet<Piece>());
    this.completedPieces = new BitSet();
    this.requestedPieces = new BitSet();

    // TODO: should switch to guice
    this.requestStrategy = requestStrategy;
}
From source file:com.google.uzaygezen.core.BitVectorTest.java
private void checkCopyFrom(Function<Integer, BitVector> factory) {
    int size = 10;
    BitVector bv = factory.apply(size);
    for (long i = 1 << size; --i >= 0;) {
        bv.copyFrom(i);
        Assert.assertEquals(i, bv.toLong());
    }
    final int bigSize = 1000;
    BitSet bs = new BitSet();
    for (int i = 0; i < bigSize; i += 3) {
        bs.set(i);
    }
    bv = factory.apply(bigSize);
    bv.copyFrom(bs);
    for (int i = 0; i < bigSize; ++i) {
        Assert.assertEquals(i % 3 == 0, bv.get(i));
    }
}