List of usage examples for java.lang.Long.SIZE
Field: public static final int SIZE — the number of bits used to represent a long value in two's complement binary form (64).
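Before the examples from real projects, a minimal, self-contained sketch (plain Java SE, no external libraries; the class name is illustrative) of what Long.SIZE is and of the Long.SIZE / Byte.SIZE idiom that recurs in the snippets below. Since Java 8 the byte width is also available directly as Long.BYTES.

    public class LongSizeDemo {
        public static void main(String[] args) {
            // Long.SIZE is the width of a long in bits.
            System.out.println(Long.SIZE);             // 64
            // The recurring idiom Long.SIZE / Byte.SIZE converts that width to bytes.
            System.out.println(Long.SIZE / Byte.SIZE); // 8
            // Since Java 8 the byte width is also available as a dedicated constant.
            System.out.println(Long.BYTES);            // 8
        }
    }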
From source file:it.unimi.dsi.sux4j.mph.MinimalPerfectHashFunction.java
/** This method implements a two-level ranking scheme. It is essentially a
 * {@link Rank11} in which {@link Long#bitCount(long)} has been replaced
 * by {@link #countNonzeroPairs(long)}.
 *
 * @param pos the position to rank in the 2-bit value array.
 * @return the number of hinges before the specified position.
 */
private long rank(long pos) {
    pos *= 2;
    assert pos >= 0;
    assert pos <= bitVector.length();

    int word = (int) (pos / Long.SIZE);
    final int block = word / (WORDS_PER_SUPERBLOCK / 2) & ~1;
    final int offset = ((word % WORDS_PER_SUPERBLOCK) / 6) - 1;
    long result = count[block] + (count[block + 1] >> 12 * (offset + (offset >>> 32 - 4 & 6)) & 0x7FF)
            + countNonzeroPairs(array[word] & (1L << pos % Long.SIZE) - 1);
    for (int todo = (word & 0x1F) % 6; todo-- != 0;)
        result += countNonzeroPairs(array[--word]);
    return result;
}
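The method above relies on countNonzeroPairs(long), which counts the 2-bit pairs of a word that are nonzero (each stored value occupies one pair of the 2-bit array). A minimal sketch of one standard way to compute such a count, under that assumption; it is not necessarily the library's exact code:

    public class NonzeroPairsDemo {
        // Count how many of the 32 two-bit pairs in x are nonzero: fold each pair into
        // its low bit with (x | x >>> 1), mask the even bit positions, then popcount.
        static int countNonzeroPairs(long x) {
            return Long.bitCount((x | x >>> 1) & 0x5555555555555555L);
        }

        public static void main(String[] args) {
            System.out.println(countNonzeroPairs(0b11_01_00_10L)); // 3 pairs are nonzero
        }
    }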
From source file:it.unimi.dsi.sux4j.mph.TwoStepsGOV3Function.java
/** Creates a new two-step function for the given keys and values.
 *
 * @param keys the keys in the domain of the function.
 * @param transform a transformation strategy for the keys.
 * @param values values to be assigned to each key, in the same order of the iterator returned by <code>keys</code>; if {@code null}, the
 * assigned value will be the ordinal number of each key.
 * @param tempDir a temporary directory for the store files, or {@code null} for the standard temporary directory.
 * @param chunkedHashStore a chunked hash store containing the keys associated with their rank, or {@code null}; the store
 * can be unchecked, but in this case <code>keys</code> and <code>transform</code> must be non-{@code null}.
 */
protected TwoStepsGOV3Function(final Iterable<? extends T> keys, final TransformationStrategy<? super T> transform,
        final LongBigList values, final File tempDir, ChunkedHashStore<T> chunkedHashStore) throws IOException {
    this.transform = transform;

    final ProgressLogger pl = new ProgressLogger(LOGGER);
    pl.displayLocalSpeed = true;
    pl.displayFreeMemory = true;
    final RandomGenerator random = new XorShift1024StarRandomGenerator();
    pl.itemsName = "keys";

    final boolean givenChunkedHashStore = chunkedHashStore != null;
    if (!givenChunkedHashStore) {
        if (keys == null)
            throw new IllegalArgumentException(
                    "If you do not provide a chunked hash store, you must provide the keys");
        chunkedHashStore = new ChunkedHashStore<T>(transform, pl);
        chunkedHashStore.reset(random.nextLong());
        chunkedHashStore.addAll(keys.iterator());
    }
    n = chunkedHashStore.size();
    defRetValue = -1; // For the very few cases in which we can decide

    if (n == 0) {
        rankMean = escape = width = 0;
        firstFunction = secondFunction = null;
        remap = null;
        if (!givenChunkedHashStore)
            chunkedHashStore.close();
        return;
    }

    // Compute distribution of values and maximum number of bits.
    int w = 0, size;
    long v;
    final Long2LongOpenHashMap counts = new Long2LongOpenHashMap();
    counts.defaultReturnValue(-1);
    for (LongIterator i = values.iterator(); i.hasNext();) {
        v = i.nextLong();
        counts.put(v, counts.get(v) + 1);
        size = Fast.length(v);
        if (size > w)
            w = size;
    }
    this.width = w;
    final int m = counts.size();

    LOGGER.debug("Generating two-steps GOV3 function with " + w + " output bits...");

    // Sort keys by reverse frequency
    final long[] keysArray = counts.keySet().toLongArray(new long[m]);
    LongArrays.quickSort(keysArray, 0, keysArray.length, new AbstractLongComparator() {
        private static final long serialVersionUID = 1L;

        public int compare(final long a, final long b) {
            return Long.signum(counts.get(b) - counts.get(a));
        }
    });

    long mean = 0;
    for (int i = 0; i < keysArray.length; i++)
        mean += i * counts.get(keysArray[i]);
    rankMean = (double) mean / n;

    // Analyze data and choose a threshold
    long post = n, bestCost = Long.MAX_VALUE;
    int pos = 0, best = -1;

    // Examine every possible choice for r. Note that r = 0 implies one function, so we do not need to test the case r == w.
    for (int r = 0; r < w && pos < m; r++) {
        /* This cost function is dependent on the implementation of GOV3Function.
         * Note that for r = 0 we are actually computing the cost of a single function (the first one). */
        final long cost = (long) Math.min(GOV3Function.C * n * 1.126 + n * r, GOV3Function.C * n * r)
                + (long) Math.min(GOV3Function.C * post * 1.126 + post * w, GOV3Function.C * post * w)
                + pos * Long.SIZE;

        if (cost < bestCost) {
            best = r;
            bestCost = cost;
        }

        /* We add to pre and subtract from post the counts of keys from position (1<<r)-1 to position (1<<r+1)-1. */
        for (int j = 0; j < (1 << r) && pos < m; j++) {
            final long c = counts.get(keysArray[pos++]);
            post -= c;
        }
    }

    if (ASSERTS)
        assert pos == m;

    counts.clear();
    counts.trim();

    // We must keep the remap array small.
    if (best >= Integer.SIZE)
        best = Integer.SIZE - 1;

    LOGGER.debug("Best threshold: " + best);

    escape = (1 << best) - 1;
    System.arraycopy(keysArray, 0, remap = new long[escape], 0, remap.length);
    final Long2LongOpenHashMap map = new Long2LongOpenHashMap();
    map.defaultReturnValue(-1);
    for (int i = 0; i < escape; i++)
        map.put(remap[i], i);

    if (best != 0) {
        firstFunction = new GOV3Function.Builder<T>().keys(keys).transform(transform).store(chunkedHashStore)
                .values(new AbstractLongBigList() {
                    public long getLong(long index) {
                        long value = map.get(values.getLong(index));
                        return value == -1 ? escape : value;
                    }

                    public long size64() {
                        return n;
                    }
                }, best).indirect().build();

        LOGGER.debug("Actual bit cost per key of first function: " + (double) firstFunction.numBits() / n);
    } else
        firstFunction = null;

    chunkedHashStore.filter(new Predicate() {
        public boolean evaluate(Object triple) {
            return firstFunction == null || firstFunction.getLongByTriple((long[]) triple) == escape;
        }
    });

    secondFunction = new GOV3Function.Builder<T>().store(chunkedHashStore).values(values, w).indirect().build();

    this.seed = chunkedHashStore.seed();
    if (!givenChunkedHashStore)
        chunkedHashStore.close();

    LOGGER.debug("Actual bit cost per key of second function: " + (double) secondFunction.numBits() / n);
    LOGGER.info("Actual bit cost per key: " + (double) numBits() / n);
    LOGGER.info("Completed.");
}
From source file:it.unimi.dsi.sux4j.mph.TwoStepsMWHCFunction.java
/** Creates a new two-step function for the given keys and values.
 *
 * @param keys the keys in the domain of the function.
 * @param transform a transformation strategy for the keys.
 * @param values values to be assigned to each key, in the same order of the iterator returned by <code>keys</code>; if {@code null}, the
 * assigned value will be the ordinal number of each key.
 * @param tempDir a temporary directory for the store files, or {@code null} for the standard temporary directory.
 * @param chunkedHashStore a chunked hash store containing the keys associated with their rank, or {@code null}; the store
 * can be unchecked, but in this case <code>keys</code> and <code>transform</code> must be non-{@code null}.
 */
protected TwoStepsMWHCFunction(final Iterable<? extends T> keys, final TransformationStrategy<? super T> transform,
        final LongBigList values, final File tempDir, ChunkedHashStore<T> chunkedHashStore) throws IOException {
    this.transform = transform;

    final ProgressLogger pl = new ProgressLogger(LOGGER);
    pl.displayLocalSpeed = true;
    pl.displayFreeMemory = true;
    final RandomGenerator random = new XorShift1024StarRandomGenerator();
    pl.itemsName = "keys";

    final boolean givenChunkedHashStore = chunkedHashStore != null;
    if (!givenChunkedHashStore) {
        if (keys == null)
            throw new IllegalArgumentException(
                    "If you do not provide a chunked hash store, you must provide the keys");
        chunkedHashStore = new ChunkedHashStore<T>(transform, pl);
        chunkedHashStore.reset(random.nextLong());
        chunkedHashStore.addAll(keys.iterator());
    }
    n = chunkedHashStore.size();
    defRetValue = -1; // For the very few cases in which we can decide

    if (n == 0) {
        rankMean = escape = width = 0;
        firstFunction = secondFunction = null;
        remap = null;
        if (!givenChunkedHashStore)
            chunkedHashStore.close();
        return;
    }

    // Compute distribution of values and maximum number of bits.
    int w = 0, size;
    long v;
    final Long2LongOpenHashMap counts = new Long2LongOpenHashMap();
    counts.defaultReturnValue(-1);
    for (LongIterator i = values.iterator(); i.hasNext();) {
        v = i.nextLong();
        counts.put(v, counts.get(v) + 1);
        size = Fast.length(v);
        if (size > w)
            w = size;
    }
    this.width = w;
    final int m = counts.size();

    LOGGER.debug("Generating two-steps MWHC function with " + w + " output bits...");

    // Sort keys by reverse frequency
    final long[] keysArray = counts.keySet().toLongArray(new long[m]);
    LongArrays.quickSort(keysArray, 0, keysArray.length, new AbstractLongComparator() {
        private static final long serialVersionUID = 1L;

        public int compare(final long a, final long b) {
            return Long.signum(counts.get(b) - counts.get(a));
        }
    });

    long mean = 0;
    for (int i = 0; i < keysArray.length; i++)
        mean += i * counts.get(keysArray[i]);
    rankMean = (double) mean / n;

    // Analyze data and choose a threshold
    long post = n, bestCost = Long.MAX_VALUE;
    int pos = 0, best = -1;

    // Examine every possible choice for r. Note that r = 0 implies one function, so we do not need to test the case r == w.
    for (int r = 0; r < w && pos < m; r++) {
        /* This cost function is dependent on the implementation of MWHCFunction.
         * Note that for r = 0 we are actually computing the cost of a single function (the first one). */
        final long cost = (long) Math.min(HypergraphSorter.GAMMA * n * 1.126 + n * r, HypergraphSorter.GAMMA * n * r)
                + (long) Math.min(HypergraphSorter.GAMMA * post * 1.126 + post * w, HypergraphSorter.GAMMA * post * w)
                + pos * Long.SIZE;

        if (cost < bestCost) {
            best = r;
            bestCost = cost;
        }

        /* We add to pre and subtract from post the counts of keys from position (1<<r)-1 to position (1<<r+1)-1. */
        for (int j = 0; j < (1 << r) && pos < m; j++) {
            final long c = counts.get(keysArray[pos++]);
            post -= c;
        }
    }

    if (ASSERTS)
        assert pos == m;

    counts.clear();
    counts.trim();

    // We must keep the remap array small.
    if (best >= Integer.SIZE)
        best = Integer.SIZE - 1;

    LOGGER.debug("Best threshold: " + best);

    escape = (1 << best) - 1;
    System.arraycopy(keysArray, 0, remap = new long[escape], 0, remap.length);
    final Long2LongOpenHashMap map = new Long2LongOpenHashMap();
    map.defaultReturnValue(-1);
    for (int i = 0; i < escape; i++)
        map.put(remap[i], i);

    if (best != 0) {
        firstFunction = new MWHCFunction.Builder<T>().keys(keys).transform(transform).store(chunkedHashStore)
                .values(new AbstractLongBigList() {
                    public long getLong(long index) {
                        long value = map.get(values.getLong(index));
                        return value == -1 ? escape : value;
                    }

                    public long size64() {
                        return n;
                    }
                }, best).indirect().build();

        LOGGER.debug("Actual bit cost per key of first function: " + (double) firstFunction.numBits() / n);
    } else
        firstFunction = null;

    chunkedHashStore.filter(new Predicate() {
        public boolean evaluate(Object triple) {
            return firstFunction == null || firstFunction.getLongByTriple((long[]) triple) == escape;
        }
    });

    secondFunction = new MWHCFunction.Builder<T>().store(chunkedHashStore).values(values, w).indirect().build();

    this.seed = chunkedHashStore.seed();
    if (!givenChunkedHashStore)
        chunkedHashStore.close();

    LOGGER.debug("Actual bit cost per key of second function: " + (double) secondFunction.numBits() / n);
    LOGGER.info("Actual bit cost per key: " + (double) numBits() / n);
    LOGGER.info("Completed.");
}
From source file:it.unimi.dsi.sux4j.io.ChunkedHashStore.java
/** Creates a chunked hash store with given transformation strategy and progress logger.
 *
 * @param transform a transformation strategy for the elements.
 * @param tempDir a temporary directory for the store files, or {@code null} for the current directory.
 * @param hashWidth if nonzero, no associated data is saved in the store: {@link Chunk#data(long)} will return this many lower bits
 * of the first of the three hashes associated with the key.
 * @param pl a progress logger, or {@code null}.
 */
public ChunkedHashStore(final TransformationStrategy<? super T> transform, final File tempDir,
        final int hashWidth, final ProgressLogger pl) throws IOException {
    this.transform = transform;
    this.pl = pl;
    this.tempDir = tempDir;
    this.hashMask = hashWidth == 0 ? 0 : -1L >>> Long.SIZE - hashWidth;

    file = new File[DISK_CHUNKS];
    dos = new DataOutputStream[DISK_CHUNKS];
    // Create disk chunks
    for (int i = 0; i < DISK_CHUNKS; i++) {
        dos[i] = new DataOutputStream(new FastBufferedOutputStream(new FileOutputStream(file[i] = File
                .createTempFile(ChunkedHashStore.class.getSimpleName(), String.valueOf(i), tempDir)),
                OUTPUT_BUFFER_SIZE));
        file[i].deleteOnExit();
    }
    count = new int[DISK_CHUNKS];
}
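The hashMask line above uses Long.SIZE to build a mask of the hashWidth lowest bits. A standalone sketch of the same idiom (the class and helper names are illustrative, not part of the ChunkedHashStore API):

    public class LowBitsMaskDemo {
        // Illustrative helper: a mask keeping only the low `width` bits of a 64-bit value.
        static long lowBitsMask(int width) {
            // The width == 0 guard matters: shift distances are taken mod 64,
            // so -1L >>> 64 would yield -1L rather than 0.
            return width == 0 ? 0 : -1L >>> (Long.SIZE - width);
        }

        public static void main(String[] args) {
            System.out.println(Long.toHexString(lowBitsMask(12))); // fff
            System.out.println(Long.toHexString(lowBitsMask(64))); // ffffffffffffffff
            System.out.println(Long.toHexString(lowBitsMask(0)));  // 0
        }
    }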
From source file:it.unimi.di.big.mg4j.index.DiskBasedIndex.java
/** Commodity method for loading a big list of binary longs with specified endianness into a {@linkplain LongBigArrays long big array}.
 *
 * @param ioFactory the factory that will be used to perform I/O.
 * @param filename the file containing the longs.
 * @param byteOrder the endianness of the longs.
 * @return a big list of longs containing the longs in <code>file</code>.
 */
public static LongBigArrayBigList loadLongBigList(final IOFactory ioFactory, final CharSequence filename,
        final ByteOrder byteOrder) throws IOException {
    final long length = ioFactory.length(filename.toString()) / (Long.SIZE / Byte.SIZE);
    ReadableByteChannel channel = ioFactory.getReadableByteChannel(filename.toString());
    final LongBigArrayBigList loadLongBigList = loadLongBigList(channel, length, byteOrder);
    channel.close();
    return loadLongBigList;
}
From source file:edu.umass.cs.utils.Util.java
public static byte[] longToBytes(long value) {
    int size = Long.SIZE / 8;
    byte[] buf = new byte[size];
    for (int i = 0; i < size; i++)
        buf[i] = (byte) ((value >> ((size - i - 1) * 8)) & 255);
    return buf;
}
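For comparison, the same big-endian conversion can be written with java.nio.ByteBuffer and the Long.BYTES constant (Java 8+); this is only an illustrative alternative, not part of the Util class above:

    import java.nio.ByteBuffer;

    public class LongBytesDemo {
        // Illustrative alternative: ByteBuffer writes longs in big-endian order by default,
        // matching the most-significant-byte-first loop in longToBytes above.
        static byte[] longToBytesNio(long value) {
            return ByteBuffer.allocate(Long.BYTES).putLong(value).array();
        }

        public static void main(String[] args) {
            // 0x0102030405060708 becomes the bytes 01 02 03 04 05 06 07 08.
            for (byte b : longToBytesNio(0x0102030405060708L))
                System.out.printf("%02x ", b);
        }
    }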
From source file:org.cloudata.core.common.io.CWritableUtils.java
public static int getLongByteSize() {
    return Long.SIZE / 8;
}
From source file:org.apache.hadoop.hive.serde2.compression.SnappyCompDe.java
private int writePrimitives(long[] primitives, ByteBuffer output) throws IOException {
    int bytesWritten = Snappy.rawCompress(primitives, 0, primitives.length * Long.SIZE / Byte.SIZE,
            output.array(), output.arrayOffset() + output.position());
    output.position(output.position() + bytesWritten);
    return bytesWritten;
}
From source file:org.eclipse.january.dataset.DTypeUtils.java
/**
 * @param dtype
 * @param isize
 *            number of elements in an item
 * @return length of single item in bytes
 */
public static int getItemBytes(final int dtype, final int isize) {
    int size;
    switch (dtype) {
    case Dataset.BOOL:
        size = 1; // How is this defined?
        break;
    case Dataset.INT8:
    case Dataset.ARRAYINT8:
        size = Byte.SIZE / 8;
        break;
    case Dataset.INT16:
    case Dataset.ARRAYINT16:
    case Dataset.RGB:
        size = Short.SIZE / 8;
        break;
    case Dataset.INT32:
    case Dataset.ARRAYINT32:
        size = Integer.SIZE / 8;
        break;
    case Dataset.INT64:
    case Dataset.ARRAYINT64:
        size = Long.SIZE / 8;
        break;
    case Dataset.FLOAT32:
    case Dataset.ARRAYFLOAT32:
    case Dataset.COMPLEX64:
        size = Float.SIZE / 8;
        break;
    case Dataset.FLOAT64:
    case Dataset.ARRAYFLOAT64:
    case Dataset.COMPLEX128:
        size = Double.SIZE / 8;
        break;
    default:
        size = 0;
        break;
    }
    return size * isize;
}
From source file:org.kiji.schema.FormattedEntityId.java
/**
 * Decode a byte array containing an hbase row key into an ordered list corresponding to
 * the key format in the layout file.
 *
 * @param format The row key format as specified in the layout file.
 * @param hbaseRowKey A byte array containing the hbase row key.
 * @return An ordered list of component values in the key.
 */
private static List<Object> makeKijiRowKey(RowKeyFormat2 format, byte[] hbaseRowKey) {
    if (hbaseRowKey.length == 0) {
        throw new EntityIdException("Invalid hbase row key");
    }
    List<Object> kijiRowKey = new ArrayList<Object>();
    // skip over the hash
    int pos = format.getSalt().getHashSize();
    // we are suppressing materialization, so the components cannot be retrieved.
    int kijiRowElem = 0;
    if (format.getSalt().getSuppressKeyMaterialization()) {
        if (pos < hbaseRowKey.length) {
            throw new EntityIdException("Extra bytes in key after hash when materialization is" + "suppressed");
        }
        return null;
    }
    ByteBuffer buf;
    while (kijiRowElem < format.getComponents().size() && pos < hbaseRowKey.length) {
        switch (format.getComponents().get(kijiRowElem).getType()) {
        case STRING:
            // Read the row key until we encounter a Null (0) byte or end.
            int endpos = pos;
            while (endpos < hbaseRowKey.length && (hbaseRowKey[endpos] != (byte) 0)) {
                endpos += 1;
            }
            String str = null;
            try {
                str = new String(hbaseRowKey, pos, endpos - pos, "UTF-8");
            } catch (UnsupportedEncodingException e) {
                LOG.error(e.toString());
                throw new EntityIdException(String.format("UnsupportedEncoding for component %d", kijiRowElem));
            }
            kijiRowKey.add(str);
            pos = endpos + 1;
            break;
        case INTEGER:
            // Toggle highest order bit to return to original 2's complement.
            hbaseRowKey[pos] = (byte) ((int) hbaseRowKey[pos] ^ (int) Byte.MIN_VALUE);
            try {
                buf = ByteBuffer.wrap(hbaseRowKey, pos, Integer.SIZE / Byte.SIZE);
            } catch (IndexOutOfBoundsException e) {
                throw new EntityIdException("Malformed hbase Row Key");
            }
            kijiRowKey.add(Integer.valueOf(buf.getInt()));
            pos = pos + Integer.SIZE / Byte.SIZE;
            break;
        case LONG:
            // Toggle highest order bit to return to original 2's complement.
            hbaseRowKey[pos] = (byte) ((int) hbaseRowKey[pos] ^ (int) Byte.MIN_VALUE);
            try {
                buf = ByteBuffer.wrap(hbaseRowKey, pos, Long.SIZE / Byte.SIZE);
            } catch (IndexOutOfBoundsException e) {
                throw new EntityIdException("Malformed hbase Row Key");
            }
            kijiRowKey.add(Long.valueOf(buf.getLong()));
            pos = pos + Long.SIZE / Byte.SIZE;
            break;
        default:
            throw new RuntimeException("Invalid code path");
        }
        kijiRowElem += 1;
    }
    // Fail if there are extra bytes in hbase row key.
    if (pos < hbaseRowKey.length) {
        throw new EntityIdException("Extra bytes in hbase row key cannot be mapped to any " + "component");
    }
    // Fail if we encounter nulls before it is legal to do so.
    if (kijiRowElem < format.getNullableStartIndex()) {
        throw new EntityIdException("Too few components decoded from hbase row key. Component " + "number "
                + kijiRowElem + " cannot be null");
    }
    // finish up with nulls for everything that wasn't in the key
    for (; kijiRowElem < format.getComponents().size(); kijiRowElem++) {
        kijiRowKey.add(null);
    }
    return kijiRowKey;
}