List of usage examples for java.lang.Long.bitCount
@HotSpotIntrinsicCandidate public static int bitCount(long i)
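Long.bitCount returns the number of one-bits in the two's complement binary representation of the specified long value. Before the source examples below, a minimal self-contained sketch (class name and sample values chosen here purely for illustration):

public class BitCountBasics {
    public static void main(String[] args) {
        System.out.println(Long.bitCount(0L));        // 0
        System.out.println(Long.bitCount(0b1011L));   // 3
        System.out.println(Long.bitCount(-1L));       // 64 (all bits set)
        System.out.println(Long.bitCount(1L << 40));  // 1 (a single high bit)
    }
}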
From source file:net.myrrix.online.candidate.LocationSensitiveHash.java
@Override
public Collection<Iterator<FastByIDMap.MapEntry<float[]>>> getCandidateIterator(float[][] userVectors) {
    long[] bitSignatures = new long[userVectors.length];
    for (int i = 0; i < userVectors.length; i++) {
        bitSignatures[i] = toBitSignature(userVectors[i]);
    }
    Collection<Iterator<FastByIDMap.MapEntry<float[]>>> inputs = Lists.newArrayList();
    for (FastByIDMap.MapEntry<long[]> entry : buckets.entrySet()) {
        for (long bitSignature : bitSignatures) {
            if (Long.bitCount(bitSignature ^ entry.getKey()) <= maxBitsDiffering) { // # bits differing
                inputs.add(new IDArrayToEntryIterator(entry.getValue()));
                break;
            }
        }
    }
    synchronized (newItems) {
        if (!newItems.isEmpty()) {
            // Have to clone because it's being written to
            inputs.add(new IDToEntryIterator(newItems.clone().iterator()));
        }
    }
    return inputs;
}
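The key idiom in this locality-sensitive-hashing example is the Hamming distance between two 64-bit signatures: XOR the signatures so that differing bits become ones, then count them with Long.bitCount. A standalone sketch of the same idea (names and values are hypothetical, not taken from the Myrrix source):

public final class HammingDistance {

    /** Number of bit positions at which the two 64-bit signatures differ. */
    static int distance(long signatureA, long signatureB) {
        return Long.bitCount(signatureA ^ signatureB);
    }

    public static void main(String[] args) {
        long a = 0b1010_1100L;
        long b = 0b1010_0101L;
        int maxBitsDiffering = 3;                                 // illustrative threshold
        System.out.println(distance(a, b));                       // 2
        System.out.println(distance(a, b) <= maxBitsDiffering);   // true: candidate bucket accepted
    }
}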
From source file:com.google.uzaygezen.core.BitVectorTest.java
private void checkCopyFromBigEndian64Bits(Function<Integer, BitVector> factory) {
    int bits = 10;
    for (int shift = 0; shift < 64 - bits; ++shift) {
        int size = bits + shift;
        BitVector bv = factory.apply(size);
        for (long i = 1 << bits; --i >= 0;) {
            for (long k : new long[] { i << shift, i << shift | ((1L << shift) - 1) }) {
                byte[] bigEndian = Longs.toByteArray(k);
                int n = (size + 7) >>> 3;
                for (int j = 0; j < bigEndian.length - n; ++j) {
                    assert bigEndian[j] == 0;
                }
                byte[] bytes = Arrays.copyOfRange(bigEndian, bigEndian.length - n, bigEndian.length);
                bv.copyFromBigEndian(bytes);
                Assert.assertEquals(Long.bitCount(k), bv.cardinality());
                if (k != bv.toExactLong()) {
                    bv.copyFromBigEndian(bytes);
                    bv.toExactLong();
                }
                Assert.assertEquals(bv.getClass().toString(), k, bv.toExactLong());
            }
        }
    }
}
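The invariant being exercised here is that the population count of a 64-bit word equals the cardinality of the bit vector built from its bytes. The same relationship can be demonstrated with java.util.BitSet, which also stores its bits in long words; this is only a sketch of the property, not part of the uzaygezen test suite:

import java.util.BitSet;

public class CardinalityMatchesBitCount {
    public static void main(String[] args) {
        long k = 0x0F0F_0F0F_0F0F_0F0FL;
        BitSet bits = BitSet.valueOf(new long[] { k });   // BitSet over the same 64-bit word
        // Both sides count the same set bits, so they must agree.
        System.out.println(Long.bitCount(k));             // 32
        System.out.println(bits.cardinality());           // 32
    }
}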
From source file:com.actionbarsherlock.internal.view.menu.ActionMenuView.java
private void onMeasureExactFormat(int widthMeasureSpec, int heightMeasureSpec) {
    // We already know the width mode is EXACTLY if we're here.
    final int heightMode = MeasureSpec.getMode(heightMeasureSpec);
    int widthSize = MeasureSpec.getSize(widthMeasureSpec);
    int heightSize = MeasureSpec.getSize(heightMeasureSpec);
    final int widthPadding = getPaddingLeft() + getPaddingRight();
    final int heightPadding = getPaddingTop() + getPaddingBottom();
    widthSize -= widthPadding;

    // Divide the view into cells.
    final int cellCount = widthSize / mMinCellSize;
    final int cellSizeRemaining = widthSize % mMinCellSize;
    if (cellCount == 0) {
        // Give up, nothing fits.
        setMeasuredDimension(widthSize, 0);
        return;
    }
    final int cellSize = mMinCellSize + cellSizeRemaining / cellCount;

    int cellsRemaining = cellCount;
    int maxChildHeight = 0;
    int maxCellsUsed = 0;
    int expandableItemCount = 0;
    int visibleItemCount = 0;
    boolean hasOverflow = false;

    // This is used as a bitfield to locate the smallest items present. Assumes childCount < 64.
    long smallestItemsAt = 0;

    final int childCount = getChildCount();
    for (int i = 0; i < childCount; i++) {
        final View child = getChildAt(i);
        if (child.getVisibility() == GONE)
            continue;

        final boolean isGeneratedItem = child instanceof ActionMenuItemView;
        visibleItemCount++;

        if (isGeneratedItem) {
            // Reset padding for generated menu item views; it may change below
            // and views are recycled.
            child.setPadding(mGeneratedItemPadding, 0, mGeneratedItemPadding, 0);
        }

        final LayoutParams lp = (LayoutParams) child.getLayoutParams();
        lp.expanded = false;
        lp.extraPixels = 0;
        lp.cellsUsed = 0;
        lp.expandable = false;
        lp.leftMargin = 0;
        lp.rightMargin = 0;
        lp.preventEdgeOffset = isGeneratedItem && ((ActionMenuItemView) child).hasText();

        // Overflow always gets 1 cell. No more, no less.
        final int cellsAvailable = lp.isOverflowButton ? 1 : cellsRemaining;
        final int cellsUsed = measureChildForCells(child, cellSize, cellsAvailable, heightMeasureSpec,
                heightPadding);

        maxCellsUsed = Math.max(maxCellsUsed, cellsUsed);
        if (lp.expandable)
            expandableItemCount++;
        if (lp.isOverflowButton)
            hasOverflow = true;

        cellsRemaining -= cellsUsed;
        maxChildHeight = Math.max(maxChildHeight, child.getMeasuredHeight());
        if (cellsUsed == 1)
            smallestItemsAt |= (1 << i);
    }

    // When we have overflow and a single expanded (text) item, we want to try centering it
    // visually in the available space even though overflow consumes some of it.
    final boolean centerSingleExpandedItem = hasOverflow && visibleItemCount == 2;

    // Divide space for remaining cells if we have items that can expand.
    // Try distributing whole leftover cells to smaller items first.
    boolean needsExpansion = false;
    while (expandableItemCount > 0 && cellsRemaining > 0) {
        int minCells = Integer.MAX_VALUE;
        long minCellsAt = 0; // Bit locations are indices of relevant child views
        int minCellsItemCount = 0;
        for (int i = 0; i < childCount; i++) {
            final View child = getChildAt(i);
            final LayoutParams lp = (LayoutParams) child.getLayoutParams();

            // Don't try to expand items that shouldn't.
            if (!lp.expandable)
                continue;

            // Mark indices of children that can receive an extra cell.
            if (lp.cellsUsed < minCells) {
                minCells = lp.cellsUsed;
                minCellsAt = 1 << i;
                minCellsItemCount = 1;
            } else if (lp.cellsUsed == minCells) {
                minCellsAt |= 1 << i;
                minCellsItemCount++;
            }
        }

        // Items that get expanded will always be in the set of smallest items when we're done.
        smallestItemsAt |= minCellsAt;

        if (minCellsItemCount > cellsRemaining)
            break; // Couldn't expand anything evenly. Stop.

        // We have enough cells, all minimum size items will be incremented.
        minCells++;

        for (int i = 0; i < childCount; i++) {
            final View child = getChildAt(i);
            final LayoutParams lp = (LayoutParams) child.getLayoutParams();
            if ((minCellsAt & (1 << i)) == 0) {
                // If this item is already at our small item count, mark it for later.
                if (lp.cellsUsed == minCells)
                    smallestItemsAt |= 1 << i;
                continue;
            }

            if (centerSingleExpandedItem && lp.preventEdgeOffset && cellsRemaining == 1) {
                // Add padding to this item such that it centers.
                child.setPadding(mGeneratedItemPadding + cellSize, 0, mGeneratedItemPadding, 0);
            }
            lp.cellsUsed++;
            lp.expanded = true;
            cellsRemaining--;
        }

        needsExpansion = true;
    }

    // Divide any space left that wouldn't divide along cell boundaries
    // evenly among the smallest items.
    final boolean singleItem = !hasOverflow && visibleItemCount == 1;
    if (cellsRemaining > 0 && smallestItemsAt != 0
            && (cellsRemaining < visibleItemCount - 1 || singleItem || maxCellsUsed > 1)) {
        float expandCount = Long.bitCount(smallestItemsAt);

        if (!singleItem) {
            // The items at the far edges may only expand by half in order to pin to either side.
            if ((smallestItemsAt & 1) != 0) {
                LayoutParams lp = (LayoutParams) getChildAt(0).getLayoutParams();
                if (!lp.preventEdgeOffset)
                    expandCount -= 0.5f;
            }
            if ((smallestItemsAt & (1 << (childCount - 1))) != 0) {
                LayoutParams lp = ((LayoutParams) getChildAt(childCount - 1).getLayoutParams());
                if (!lp.preventEdgeOffset)
                    expandCount -= 0.5f;
            }
        }

        final int extraPixels = expandCount > 0 ? (int) (cellsRemaining * cellSize / expandCount) : 0;

        for (int i = 0; i < childCount; i++) {
            if ((smallestItemsAt & (1 << i)) == 0)
                continue;

            final View child = getChildAt(i);
            final LayoutParams lp = (LayoutParams) child.getLayoutParams();
            if (child instanceof ActionMenuItemView) {
                // If this is one of our views, expand and measure at the larger size.
                lp.extraPixels = extraPixels;
                lp.expanded = true;
                if (i == 0 && !lp.preventEdgeOffset) {
                    // First item gets part of its new padding pushed out of sight.
                    // The last item will get this implicitly from layout.
                    lp.leftMargin = -extraPixels / 2;
                }
                needsExpansion = true;
            } else if (lp.isOverflowButton) {
                lp.extraPixels = extraPixels;
                lp.expanded = true;
                lp.rightMargin = -extraPixels / 2;
                needsExpansion = true;
            } else {
                // If we don't know what it is, give it some margins instead
                // and let it center within its space. We still want to pin
                // against the edges.
                if (i != 0) {
                    lp.leftMargin = extraPixels / 2;
                }
                if (i != childCount - 1) {
                    lp.rightMargin = extraPixels / 2;
                }
            }
        }

        cellsRemaining = 0;
    }

    // Remeasure any items that have had extra space allocated to them.
    if (needsExpansion) {
        int heightSpec = MeasureSpec.makeMeasureSpec(heightSize - heightPadding, heightMode);
        for (int i = 0; i < childCount; i++) {
            final View child = getChildAt(i);
            final LayoutParams lp = (LayoutParams) child.getLayoutParams();
            if (!lp.expanded)
                continue;

            final int width = lp.cellsUsed * cellSize + lp.extraPixels;
            child.measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), heightSpec);
        }
    }

    if (heightMode != MeasureSpec.EXACTLY) {
        heightSize = maxChildHeight;
    }

    setMeasuredDimension(widthSize, heightSize);
    //UNUSED mMeasuredExtraWidth = cellsRemaining * cellSize;
}
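The pattern worth noting here is the use of a long as a small bitset over child indices: bit i is set when child i is a one-cell item, and Long.bitCount(smallestItemsAt) gives how many of those children will share the leftover pixels. A stripped-down sketch of that bookkeeping (values made up for illustration):

public class BitfieldOfIndices {
    public static void main(String[] args) {
        long smallestItemsAt = 0L;          // bit i == 1  =>  child i uses exactly one cell
        int[] cellsUsedPerChild = { 1, 2, 1, 1, 3 };

        for (int i = 0; i < cellsUsedPerChild.length; i++) {
            if (cellsUsedPerChild[i] == 1) {
                smallestItemsAt |= 1L << i; // record the child's index in the bitfield
            }
        }

        int shareCount = Long.bitCount(smallestItemsAt);
        System.out.println(Long.toBinaryString(smallestItemsAt)); // 1101
        System.out.println(shareCount);                           // 3 children split the leftover space
    }
}

One design detail: the listing above shifts with 1 << i, an int shift, which is only safe for the first 31 child indices even though the bitfield comment assumes up to 64 children; the sketch uses 1L << i to cover the full long range.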
From source file:hybridewah.HybridBitmap.java
/**
 * Reports the number of bits set to true. Running time is proportional to
 * the compressed size (as reported by sizeInBytes).
 *
 * @return the number of bits set to true
 */
public int cardinality() {
    int counter = 0;
    if (this.verbatim) {
        for (int i = 0; i < this.actualsizeinwords; i++) {
            counter += Long.bitCount(this.buffer[i]);
        }
    } else {
        final EWAHIterator i = new EWAHIterator(this.buffer, this.actualsizeinwords);
        while (i.hasNext()) {
            RunningLengthWord localrlw = i.next();
            if (localrlw.getRunningBit()) {
                counter += wordinbits * localrlw.getRunningLength();
            }
            for (int j = 0; j < localrlw.getNumberOfLiteralWords(); ++j) {
                counter += Long.bitCount(i.buffer()[i.literalWords() + j]);
            }
        }
    }
    return counter;
}
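The verbatim branch is simply a population count over an array of 64-bit words. A self-contained sketch of that core loop (the method and field names here are illustrative, not the HybridBitmap internals):

public class ArrayCardinality {
    /** Total number of set bits across all words of an uncompressed bitmap buffer. */
    static int cardinality(long[] buffer) {
        int counter = 0;
        for (long word : buffer) {
            counter += Long.bitCount(word);
        }
        return counter;
    }

    public static void main(String[] args) {
        long[] buffer = { -1L, 0L, 0b101L };
        System.out.println(cardinality(buffer));  // 64 + 0 + 2 = 66
    }
}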
From source file:hybridewah.HybridBitmap.java
/**
 * Get the locations of the true values as one vector (may use more memory
 * than iterator()).
 *
 * @return the positions
 */
public int[] getPositionsOptimized() {
    // final ArrayList<Integer> v = new ArrayList<Integer>();
    int[] v = new int[this.cardinality()];
    if (this.verbatim) {
        int pos = 1;
        int vpos = 0;
        long data = 0;
        for (int i = 0; i < this.actualsizeinwords; i++) {
            data = this.buffer[i];
            //if (data > 0) {
            while (data != 0) {
                //v.add(Long.bitCount(~(data^(-data)))+pos);
                v[vpos++] = Long.bitCount(~(data ^ (-data))) + pos;
                data = data & (data - 1);
            }
            pos += 64;
            //}
        }
        return v;
    } else {
        final EWAHIterator i = new EWAHIterator(this.buffer, this.actualsizeinwords);
        int pos = 1;
        int vpos = 0;
        while (i.hasNext()) {
            RunningLengthWord localrlw = i.next();
            if (localrlw.getRunningBit()) {
                for (int j = 0; j < localrlw.getRunningLength(); ++j) {
                    for (int c = 0; c < wordinbits; ++c)
                        // v.add(new Integer(pos++));
                        v[vpos++] = pos++;
                }
            } else {
                pos += wordinbits * localrlw.getRunningLength();
            }
            for (int j = 0; j < localrlw.getNumberOfLiteralWords(); ++j) {
                long data = i.buffer()[i.literalWords() + j];
                while (data != 0) {
                    //v.add(Long.bitCount(~(data^(-data)))+pos);
                    v[vpos++] = Long.bitCount(~(data ^ (-data))) + pos;
                    data = data & (data - 1);
                }
                pos += 64;
            }
        }
        return v;
    }
}
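Inside the inner loops, ~(data ^ -data) is a mask covering the lowest set bit of data and everything below it, so Long.bitCount of that mask yields the 1-based index of the lowest set bit; data & (data - 1) then clears that bit before the next iteration. A more common way to write the same extraction uses Long.numberOfTrailingZeros, shown here as a sketch (not the HybridBitmap code):

public class SetBitPositions {
    /** Returns the 0-based positions of all set bits in a single 64-bit word. */
    static int[] positions(long word) {
        int[] result = new int[Long.bitCount(word)];
        int out = 0;
        long data = word;
        while (data != 0) {
            result[out++] = Long.numberOfTrailingZeros(data); // index of the lowest set bit
            data &= data - 1;                                 // clear that bit
        }
        return result;
    }

    public static void main(String[] args) {
        System.out.println(java.util.Arrays.toString(positions(0b1001_0010L))); // [1, 4, 7]
    }
}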
From source file:org.apache.hadoop.hive.ql.exec.persistence.BytesBytesMultiHashMap.java
public BytesBytesMultiHashMap(int initialCapacity, float loadFactor, int wbSize, long maxProbeSize) {
    if (loadFactor < 0 || loadFactor > 1) {
        throw new AssertionError("Load factor must be between (0, 1].");
    }
    assert initialCapacity > 0;
    initialCapacity = (Long.bitCount(initialCapacity) == 1) ? initialCapacity
            : nextHighestPowerOfTwo(initialCapacity);
    // 8 bytes per long in the refs, assume data will be empty. This is just a sanity check.
    int maxCapacity = (maxProbeSize <= 0) ? DEFAULT_MAX_CAPACITY
            : (int) Math.min((long) DEFAULT_MAX_CAPACITY, maxProbeSize / 8);
    if (maxCapacity < DEFAULT_MIN_MAX_CAPACITY) {
        maxCapacity = DEFAULT_MIN_MAX_CAPACITY;
    }
    if (maxCapacity < initialCapacity || initialCapacity <= 0) {
        // Either initialCapacity is too large, or nextHighestPowerOfTwo overflows
        initialCapacity = (Long.bitCount(maxCapacity) == 1) ? maxCapacity : nextLowestPowerOfTwo(maxCapacity);
    }
    validateCapacity(initialCapacity);
    startingHashBitCount = 63 - Long.numberOfLeadingZeros(initialCapacity);
    this.loadFactor = loadFactor;
    refs = new long[initialCapacity];
    writeBuffers = new WriteBuffers(wbSize, MAX_WB_SIZE);
    resizeThreshold = (int) (initialCapacity * this.loadFactor);
}
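Long.bitCount(x) == 1 is the standard power-of-two test: a power of two has exactly one bit set. The nextHighestPowerOfTwo helper is not shown in this listing; a common way to implement that rounding, sketched here under the assumption that the input is positive and no larger than 2^30 so the result fits in an int, is:

public class PowerOfTwoRounding {
    /** True when x is a positive power of two, i.e. exactly one bit is set. */
    static boolean isPowerOfTwo(long x) {
        return x > 0 && Long.bitCount(x) == 1;
    }

    /** Smallest power of two >= x, assuming 0 < x <= 2^30 so the result cannot overflow an int. */
    static int nextHighestPowerOfTwo(int x) {
        if (x <= 1) {
            return 1;
        }
        return Integer.highestOneBit(x - 1) << 1;
    }

    public static void main(String[] args) {
        System.out.println(isPowerOfTwo(1024));          // true
        System.out.println(isPowerOfTwo(1000));          // false
        System.out.println(nextHighestPowerOfTwo(1000)); // 1024
    }
}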
From source file:org.apache.hadoop.hive.ql.exec.persistence.BytesBytesMultiHashMap.java
public void expandAndRehashToTarget(int estimateNewRowCount) {
    int oldRefsCount = refs.length;
    int newRefsCount = oldRefsCount + estimateNewRowCount;
    if (resizeThreshold <= newRefsCount) {
        newRefsCount = (Long.bitCount(newRefsCount) == 1) ? estimateNewRowCount
                : nextHighestPowerOfTwo(newRefsCount);
        expandAndRehashImpl(newRefsCount);
        LOG.info("Expand and rehash to " + newRefsCount + " from " + oldRefsCount);
    }
}
From source file:org.apache.hadoop.hive.ql.exec.persistence.BytesBytesMultiHashMap.java
private static void validateCapacity(long capacity) {
    if (Long.bitCount(capacity) != 1) {
        throw new AssertionError("Capacity must be a power of two");
    }
    if (capacity <= 0) {
        throw new AssertionError("Invalid capacity " + capacity);
    }
    if (capacity > Integer.MAX_VALUE) {
        throw new RuntimeException("Attempting to expand the hash table to " + capacity
                + " that overflows maximum array size. For this query, you may want to disable "
                + ConfVars.HIVEDYNAMICPARTITIONHASHJOIN.varname + " or reduce "
                + ConfVars.HIVECONVERTJOINNOCONDITIONALTASKTHRESHOLD.varname);
    }
}
From source file:org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VectorMapJoinFastHashTable.java
private static void validateCapacity(long capacity) {
    if (Long.bitCount(capacity) != 1) {
        throw new AssertionError("Capacity must be a power of two");
    }
    if (capacity <= 0) {
        throw new AssertionError("Invalid capacity " + capacity);
    }
}
From source file:org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VectorMapJoinFastHashTable.java
public VectorMapJoinFastHashTable(int initialCapacity, float loadFactor, int writeBuffersSize) {
    initialCapacity = (Long.bitCount(initialCapacity) == 1) ? initialCapacity
            : nextHighestPowerOfTwo(initialCapacity);
    validateCapacity(initialCapacity);
    logicalHashBucketCount = initialCapacity;
    logicalHashBucketMask = logicalHashBucketCount - 1;
    resizeThreshold = (int) (logicalHashBucketCount * loadFactor);
    this.loadFactor = loadFactor;
    this.writeBuffersSize = writeBuffersSize;
}
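Keeping the bucket count a power of two (verified here with Long.bitCount(...) == 1) is what allows hash & logicalHashBucketMask to stand in for the slower hash % logicalHashBucketCount. A small sketch of that masking trick (names and values chosen for illustration, not taken from the Hive code):

public class BucketMasking {
    public static void main(String[] args) {
        int logicalHashBucketCount = 16;                        // must have exactly one bit set
        assert Long.bitCount(logicalHashBucketCount) == 1;
        int logicalHashBucketMask = logicalHashBucketCount - 1; // 0b1111

        long hash = 0x9E3779B97F4A7C15L;                        // arbitrary 64-bit hash value
        int bucket = (int) (hash & logicalHashBucketMask);      // same result as Math.floorMod(hash, 16)
        System.out.println(bucket);                             // 5
    }
}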