List of usage examples for java.lang.Integer.numberOfLeadingZeros
@HotSpotIntrinsicCandidate
public static int numberOfLeadingZeros(int i)

Returns the number of zero bits preceding the highest-order ("leftmost") one-bit in the two's complement binary representation of the specified int value, or 32 if the value is equal to zero.
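Before the per-project examples, a short self-contained sketch (plain JDK only) of the return values and the floor-log2 idiom that the snippets below rely on:

public class NumberOfLeadingZerosDemo {
    public static void main(String[] args) {
        System.out.println(Integer.numberOfLeadingZeros(0));    // 32 (no one-bits at all)
        System.out.println(Integer.numberOfLeadingZeros(1));    // 31
        System.out.println(Integer.numberOfLeadingZeros(16));   // 27
        System.out.println(Integer.numberOfLeadingZeros(-1));   // 0 (sign bit is set)

        // The idiom used repeatedly below: floor(log2(n)) for n > 0
        int n = 1000;
        System.out.println(31 - Integer.numberOfLeadingZeros(n));   // 9, since 2^9 = 512 <= 1000 < 1024
    }
}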
From source file:org.orbisgis.view.toc.actions.cui.legend.stats.Thresholds.java
/**
 * Gets a boxed means analysis using the provided data. If {@code classNumber} is not a power of two,
 * the greatest power of two that is lower than it will be used.
 * @param classNumber The number of classes
 * @return The thresholds
 */
public SortedSet<Double> getBoxedMeans(int classNumber) {
    SortedSet<Double> ret = new TreeSet<Double>();
    ret.add(stats.getMin());
    int levels = classNumber == 0 ? 0 : 32 - Integer.numberOfLeadingZeros(classNumber) - 1;
    computeBoxedMeans(stats, ret, levels - 1);
    return ret;
}
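The levels expression above is the floor-log2 idiom; a small standalone illustration (values are hypothetical) of how a class count that is not a power of two gets rounded down:

int classNumber = 6;                                              // not a power of two
int levels = 32 - Integer.numberOfLeadingZeros(classNumber) - 1;  // 2, i.e. floor(log2(6))
int effectiveClasses = 1 << levels;                               // 4, the greatest power of two lower than 6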
From source file:net.ripe.ipresource.Ipv4Address.java
@Override
public int getCommonPrefixLength(UniqueIpResource other) {
    Validate.isTrue(getType() == other.getType(), "incompatible resource types");
    long temp = value() ^ ((Ipv4Address) other).value();
    return Integer.numberOfLeadingZeros((int) temp);
}
From source file:net.ripe.ipresource.Ipv4Address.java
@Override
public boolean isValidNetmask() {
    int leadingOnesCount = Integer.numberOfLeadingZeros(~(int) value());
    int trailingZeroesCount = Integer.numberOfTrailingZeros((int) value());
    return leadingOnesCount > 0 && (leadingOnesCount + trailingZeroesCount) == NUMBER_OF_BITS;
}
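Both Ipv4Address methods boil down to bit operations on the 32-bit address value; a standalone sketch with raw int addresses (hypothetical values, no RIPE library types needed for the arithmetic):

// Common prefix length of 192.168.0.0 and 192.168.128.0: XOR the addresses, count leading zeros.
int a = (192 << 24) | (168 << 16);                               // 192.168.0.0
int b = (192 << 24) | (168 << 16) | (128 << 8);                  // 192.168.128.0
int commonPrefixLength = Integer.numberOfLeadingZeros(a ^ b);    // 16

// Netmask check for 255.255.255.0: the one-bits must be contiguous from the top.
int mask = 0xFFFFFF00;
int leadingOnes = Integer.numberOfLeadingZeros(~mask);           // 24
int trailingZeroes = Integer.numberOfTrailingZeros(mask);        // 8
boolean validNetmask = leadingOnes > 0 && leadingOnes + trailingZeroes == 32;   // true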
From source file:jext2.Superblock.java
public int getAddressesPerBlockBits() {
    // floor of log2(getAddressesPerBlock()); exact when the count is a power of two
    return 31 - Integer.numberOfLeadingZeros(getAddressesPerBlock());
}
From source file:org.apache.hadoop.fs.nfs.NFSv3FileSystemStore.java
public int getReadSizeBits() {
    int rtMax = fsInfo.getRtmax();
    int readBlockSizeBits = space.getConfiguration().getNfsReadSizeBits();
    if ((rtMax > 0) && ((1 << readBlockSizeBits) > rtMax)) {
        readBlockSizeBits = 31 - Integer.numberOfLeadingZeros(rtMax);
    }
    return readBlockSizeBits;
}
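The clamp above falls back to the largest power of two not exceeding the server's maximum transfer size; a standalone illustration with made-up numbers:

// Made-up values: 2^20 (1 MiB) configured read size, server rtMax of 256 KiB.
int rtMax = 256 * 1024;
int readBlockSizeBits = 20;
if (rtMax > 0 && (1 << readBlockSizeBits) > rtMax) {
    readBlockSizeBits = 31 - Integer.numberOfLeadingZeros(rtMax);   // 18, since 2^18 = 256 KiB
}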
From source file:com.hurence.logisland.botsearch.Trace.java
/**
 * We represent our trace as a binary signal by assigning it to be 1 at
 * each connection start, and 0 in-between connections. To calculate a
 * high-quality FFT, we used a sampling interval
 * of 1/4th of the smallest time interval in the trace, which ensures that
 * we do not undersample. However, if the distance between two flows is
 * extremely small and large gaps occur between other flows of the trace,
 * this sampling method can lead to a significant amount of data points. In
 * such cases, we limit the length of our FFT trace to 2^16 = 65 536
 * data points and accept minor undersampling. We chose this value as the FFT
 * is fastest for lengths that are powers of two.
 */
double[] sampleFlows() {
    //-------------------------------------------------------
    // start with best fit sample unit
    double deltaTime = smallestTimeInterval / 4.0;
    int sampleSize = (int) (biggestTimeInterval / deltaTime);

    // accept some undersampling to limit sample count
    int nearestPowerOf2 = sampleSize == 0 ? 0 : 32 - Integer.numberOfLeadingZeros(sampleSize - 1);
    if (nearestPowerOf2 > 16) {
        nearestPowerOf2 = 16;
    }

    // FFT works better with power of 2
    sampleSize = (int) Math.pow(2, nearestPowerOf2);
    deltaTime = biggestTimeInterval / sampleSize;

    double[] samples = new double[sampleSize];

    //-------------------------------------------------------
    // set 1 at each flow start, 0 elsewhere
    double durationSum = 0.0;
    for (int i = 0; i < durations.length; i++) {
        durationSum += durations[i];
        int index = (int) (durationSum / deltaTime);
        // watch out out of bounds
        if (index >= sampleSize) {
            index = sampleSize - 1;
        }
        if (index >= 0 && index < samples.length) {
            samples[index] = 1.0;
        }
    }

    return samples;
}
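The nearestPowerOf2 computation rounds the sample count up to the next power of two before capping it at 2^16; a standalone illustration with a hypothetical sample count:

// Hypothetical sample count that would exceed the 2^16 limit.
int sampleSize = 100_000;
int nearestPowerOf2 = sampleSize == 0 ? 0 : 32 - Integer.numberOfLeadingZeros(sampleSize - 1);  // 17
if (nearestPowerOf2 > 16) {
    nearestPowerOf2 = 16;   // accept minor undersampling
}
int fftLength = 1 << nearestPowerOf2;   // 65536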
From source file:org.apache.hadoop.fs.nfs.NFSv3FileSystemStore.java
public int getWriteSizeBits() {
    int wtMax = fsInfo.getWtmax();
    int writeBlockSizeBits = space.getConfiguration().getNfsWriteSizeBits();
    if ((wtMax > 0) && ((1 << writeBlockSizeBits) > wtMax)) {
        writeBlockSizeBits = 31 - Integer.numberOfLeadingZeros(wtMax);
    }
    return writeBlockSizeBits;
}
From source file:org.apache.hadoop.hive.llap.cache.BuddyAllocator.java
@VisibleForTesting
public BuddyAllocator(boolean isDirectVal, boolean isMappedVal, int minAllocVal, int maxAllocVal,
        int arenaCount, long maxSizeVal, long defragHeadroom, String mapPath,
        MemoryManager memoryManager, LlapDaemonCacheMetrics metrics, String discardMethod) {
    isDirect = isDirectVal;
    isMapped = isMappedVal;
    minAllocation = minAllocVal;
    maxAllocation = maxAllocVal;
    if (isMapped) {
        try {
            cacheDir = Files.createTempDirectory(FileSystems.getDefault().getPath(mapPath), "llap-", RWX);
        } catch (IOException ioe) {
            // conf validator already checks this, so it will never trigger usually
            throw new AssertionError("Configured mmap directory should be writable", ioe);
        }
    } else {
        cacheDir = null;
    }
    arenaSize = validateAndDetermineArenaSize(arenaCount, maxSizeVal);
    maxSize = validateAndDetermineMaxSize(maxSizeVal);
    memoryManager.updateMaxSize(determineMaxMmSize(defragHeadroom, maxSize));
    minAllocLog2 = 31 - Integer.numberOfLeadingZeros(minAllocation);
    maxAllocLog2 = 31 - Integer.numberOfLeadingZeros(maxAllocation);
    arenaSizeLog2 = 63 - Long.numberOfLeadingZeros(arenaSize);
    maxArenas = (int) (maxSize / arenaSize);
    arenas = new Arena[maxArenas];
    for (int i = 0; i < maxArenas; ++i) {
        arenas[i] = new Arena();
    }
    Arena firstArena = arenas[0];
    firstArena.init(0);
    allocatedArenas.set(1);
    this.memoryManager = memoryManager;
    defragCounters = new AtomicLong[maxAllocLog2 - minAllocLog2 + 1];
    for (int i = 0; i < defragCounters.length; ++i) {
        defragCounters[i] = new AtomicLong(0);
    }
    this.metrics = metrics;
    metrics.incrAllocatedArena();
    boolean isBoth = null == discardMethod || "both".equalsIgnoreCase(discardMethod);
    doUseFreeListDiscard = isBoth || "freelist".equalsIgnoreCase(discardMethod);
    doUseBruteDiscard = isBoth || "brute".equalsIgnoreCase(discardMethod);
    ctxPool = new FixedSizedObjectPool<DiscardContext>(32,
            new FixedSizedObjectPool.PoolObjectHelper<DiscardContext>() {
                @Override
                public DiscardContext create() {
                    return new DiscardContext();
                }

                @Override
                public void resetBeforeOffer(DiscardContext t) {
                }
            });
}
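The cached log2 fields determine how many buddy free lists the allocator keeps; a standalone sketch with assumed sizes (256 KiB minimum and 16 MiB maximum allocation, 64 MiB arena, all hypothetical):

// Hypothetical sizes: 256 KiB min allocation, 16 MiB max allocation, 64 MiB arena.
int minAllocation = 256 * 1024;          // 2^18
int maxAllocation = 16 * 1024 * 1024;    // 2^24
long arenaSize = 64L * 1024 * 1024;      // 2^26

int minAllocLog2 = 31 - Integer.numberOfLeadingZeros(minAllocation);   // 18
int maxAllocLog2 = 31 - Integer.numberOfLeadingZeros(maxAllocation);   // 24
int arenaSizeLog2 = 63 - Long.numberOfLeadingZeros(arenaSize);         // 26
int counterCount = maxAllocLog2 - minAllocLog2 + 1;                    // 7, the defragCounters length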
From source file:edu.iu.daal_als.VLoadTask.java
private static int getArrLen(int numV) {
    // rounds numV up to the nearest power of two
    return 1 << (32 - Integer.numberOfLeadingZeros(numV - 1));
}
From source file:haven.Utils.java
// Decodes a packed 32-bit value (three signed components sharing a 5-bit exponent) into three floats;
// numberOfLeadingZeros normalizes each 8-bit magnitude into an IEEE 754 mantissa.
public static void float9995d(int word, float[] ret) {
    int xb = (word & 0x7f800000) >> 23, xs = ((word & 0x80000000) >> 31) & 1,
        yb = (word & 0x003fc000) >> 14, ys = ((word & 0x00400000) >> 22) & 1,
        zb = (word & 0x00001fd0) >> 5, zs = ((word & 0x00002000) >> 13) & 1;
    int me = (word & 0x1f) - 15;
    int xe = Integer.numberOfLeadingZeros(xb),
        ye = Integer.numberOfLeadingZeros(yb),
        ze = Integer.numberOfLeadingZeros(zb);
    if (xe == 32)
        ret[0] = 0;
    else
        ret[0] = Float.intBitsToFloat((xs << 31) | ((me - (xe - 24) + 126) << 23) | ((xb << (xe - 8)) & 0x007fffff));
    if (ye == 32)
        ret[1] = 0;
    else
        ret[1] = Float.intBitsToFloat((ys << 31) | ((me - (ye - 24) + 126) << 23) | ((yb << (ye - 8)) & 0x007fffff));
    if (ze == 32)
        ret[2] = 0;
    else
        ret[2] = Float.intBitsToFloat((zs << 31) | ((me - (ze - 24) + 126) << 23) | ((zb << (ze - 8)) & 0x007fffff));
}