Example usage for java.lang Long SIZE

List of usage examples for java.lang Long SIZE

Introduction

On this page you can find usage examples for java.lang.Long.SIZE.

Prototype

public static final int SIZE = 64

Document

The number of bits used to represent a long value in two's complement binary form.
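For quick reference, Long.SIZE is the constant 64, and the recurring idiom in the examples below, -1L >>> Long.SIZE - width, builds a mask of the lowest width bits. A minimal, self-contained sketch (class and variable names are illustrative, not taken from the examples):

public class LongSizeDemo {
    public static void main(String[] args) {
        System.out.println(Long.SIZE); // prints 64

        // Mask of the lowest "width" bits, as used for signatures in the examples below
        // (valid for 0 < width <= Long.SIZE).
        final int width = 16;
        final long mask = -1L >>> (Long.SIZE - width);
        System.out.println(Long.toHexString(mask)); // prints ffff
    }
}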

Usage

From source file:it.unimi.dsi.sux4j.mph.MinimalPerfectHashFunction.java

/** Low-level access to the output of this minimal perfect hash function.
 *
 * <p>This method makes it possible to build several kinds of functions on the same {@link ChunkedHashStore} and
 * then retrieve the resulting values by generating a single triple of hashes. The method 
 * {@link TwoStepsMWHCFunction#getLong(Object)} is a good example of this technique.
 *
 * @param triple a triple generated as documented in {@link ChunkedHashStore}.
 * @return the output of the function.
 */
public long getLongByTriple(final long[] triple) {
    if (n == 0)
        return defRetValue;
    final int[] e = new int[3];
    final int chunk = chunkShift == Long.SIZE ? 0 : (int) (triple[0] >>> chunkShift);
    final long chunkOffset = offset[chunk];
    HypergraphSorter.tripleToEdge(triple, seed[chunk], (int) (offset[chunk + 1] - chunkOffset), e);
    if (e[0] == -1)
        return defRetValue;
    final long result = rank(chunkOffset + e[(int) (values.getLong(e[0] + chunkOffset)
            + values.getLong(e[1] + chunkOffset) + values.getLong(e[2] + chunkOffset)) % 3]);
    if (signatureMask != 0)
        return result >= n || signatures.getLong(result) != (triple[0] & signatureMask) ? defRetValue : result;
    // Out-of-set strings can generate bizarre 3-hyperedges.
    return result < n ? result : defRetValue;
}
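One detail in the computation of chunk above: Java masks the shift distance of a long shift to its low six bits, so triple[0] >>> 64 would return triple[0] unchanged rather than 0. The chunkShift == Long.SIZE special case therefore covers the single-chunk configuration, where the shift would otherwise be 64. A small illustrative sketch (names are not from the library):

public class ShiftBySizeDemo {
    public static void main(String[] args) {
        final long hash = 0xCAFEBABECAFEBABEL;
        final int chunkShift = Long.SIZE;          // single chunk: log2chunks == 0
        // Shifting a long by 64 is a no-op in Java, so the naive form does NOT give chunk 0.
        System.out.println(hash >>> chunkShift);   // prints the hash itself
        // The guarded form used in getLongByTriple forces chunk 0 instead.
        System.out.println(chunkShift == Long.SIZE ? 0 : (int) (hash >>> chunkShift)); // prints 0
    }
}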

From source file:it.unimi.dsi.sux4j.io.ChunkedHashStore.java

/** Generate a list of signatures using the lowest bits of the first hash in this store.
 *
 * <p>For this method to work, this store must contain ranks.
 * 
 * @param signatureWidth the width in bits of the signatures.
 * @param pl a progress logger.
 */

public LongBigList signatures(final int signatureWidth, final ProgressLogger pl) throws IOException {
    final LongBigList signatures = LongArrayBitVector.getInstance().asLongBigList(signatureWidth);
    final long signatureMask = -1L >>> Long.SIZE - signatureWidth;
    signatures.size(size());
    pl.expectedUpdates = size();
    pl.itemsName = "signatures";
    pl.start("Signing...");
    for (ChunkedHashStore.Chunk chunk : this) {
        final Iterator<long[]> chunkIterator = chunk.iterator();
        for (int i = chunk.size(); i-- != 0;) {
            final long[] quadruple = chunkIterator.next();
            signatures.set(quadruple[3], signatureMask & quadruple[0]);
            pl.lightUpdate();
        }
    }
    pl.done();
    return signatures;
}
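The same mask idiom drives the membership signatures used throughout these classes: the lowest signatureWidth bits of a key's first hash are stored at the key's rank, and at query time a mismatch maps out-of-set keys to the default return value (see the signatureMask checks in the getLong methods below). A minimal sketch with illustrative names:

public class SignatureCheckDemo {
    public static void main(String[] args) {
        final int signatureWidth = 16;
        final long signatureMask = -1L >>> (Long.SIZE - signatureWidth);

        final long firstHash = 0x1234_5678_9ABC_DEF0L;
        final long stored = firstHash & signatureMask;   // what signatures.set(...) stores

        // At query time, an in-set key reproduces the same low bits...
        System.out.println(((stored ^ firstHash) & signatureMask) == 0);   // true
        // ...while an out-of-set key almost certainly does not.
        final long otherHash = 0x0FED_CBA9_8765_4321L;
        System.out.println(((stored ^ otherHash) & signatureMask) == 0);   // false
    }
}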

From source file:it.unimi.dsi.sux4j.mph.GOVMinimalPerfectHashFunction.java

/** A dirty function replicating the behaviour of {@link #getLongByTriple(long[])} but skipping the
 * signature test. Used in the constructor. <strong>Must</strong> be kept in sync with {@link #getLongByTriple(long[])}. */
private long getLongByTripleNoCheck(final long[] triple, final int[] e) {
    final int chunk = chunkShift == Long.SIZE ? 0 : (int) (triple[0] >>> chunkShift);
    final long edgeOffsetSeed = edgeOffsetAndSeed[chunk];
    final long chunkOffset = vertexOffset(edgeOffsetSeed);
    Linear3SystemSolver.tripleToEquation(triple, edgeOffsetSeed & ~OFFSET_MASK,
            (int) (vertexOffset(edgeOffsetAndSeed[chunk + 1]) - chunkOffset), e);
    return (edgeOffsetSeed & OFFSET_MASK)
            + countNonzeroPairs(chunkOffset,
                    chunkOffset + e[(int) (values.getLong(e[0] + chunkOffset)
                            + values.getLong(e[1] + chunkOffset) + values.getLong(e[2] + chunkOffset)) % 3],
                    array);
}

From source file:it.unimi.dsi.sux4j.mph.MinimalPerfectHashFunction.java

/** A dirty function replicating the behaviour of {@link #getLongByTriple(long[])} but skipping the
 * signature test. Used in the constructor. <strong>Must</strong> be kept in sync with {@link #getLongByTriple(long[])}. */
private long getLongByTripleNoCheck(final long[] triple, final int[] e) {
    final int chunk = chunkShift == Long.SIZE ? 0 : (int) (triple[0] >>> chunkShift);
    final long chunkOffset = offset[chunk];
    HypergraphSorter.tripleToEdge(triple, seed[chunk], (int) (offset[chunk + 1] - chunkOffset), e);
    return rank(chunkOffset + e[(int) (values.getLong(e[0] + chunkOffset) + values.getLong(e[1] + chunkOffset)
            + values.getLong(e[2] + chunkOffset)) % 3]);
}

From source file:it.unimi.dsi.sux4j.io.ChunkedHashStore.java

/** Sets the number of chunks.
 *
 * <p>Once the store is filled, you must call this method to set the number of chunks. The store will take
 * care of merging or fragmenting disk chunks to get exactly the desired chunks.
 * 
 * @param log2chunks the base-2 logarithm of the number of chunks.
 * @return the shift to be applied to the first hash of a triple to get the chunk number (see the {@linkplain ChunkedHashStore introduction}).
 */

public int log2Chunks(final int log2chunks) {
    this.chunks = 1 << log2chunks;
    diskChunkStep = (int) Math.max(DISK_CHUNKS / chunks, 1);
    virtualDiskChunks = DISK_CHUNKS / diskChunkStep;

    if (DEBUG) {
        System.err.print("Chunk sizes: ");
        double avg = filteredSize / (double) DISK_CHUNKS;
        double var = 0;
        for (int i = 0; i < DISK_CHUNKS; i++) {
            System.err.print(i + ":" + count[i] + " ");
            var += (count[i] - avg) * (count[i] - avg);
        }
        System.err.println();
        System.err.println("Average: " + avg);
        System.err.println("Variance: " + var / filteredSize);

    }

    chunkShift = Long.SIZE - log2chunks;

    LOGGER.debug("Number of chunks: " + chunks);
    LOGGER.debug("Number of disk chunks: " + DISK_CHUNKS);
    LOGGER.debug("Number of virtual disk chunks: " + virtualDiskChunks);

    return chunkShift;
}
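The shift returned by log2Chunks is consumed exactly as in the getLong and getLongByTriple methods shown on this page: with 2^k chunks, chunkShift is Long.SIZE - k, so the top k bits of the first hash select the chunk. A small illustrative sketch:

public class ChunkSelectDemo {
    public static void main(String[] args) {
        final int log2chunks = 4;                      // 16 chunks
        final int chunkShift = Long.SIZE - log2chunks; // 60
        final long firstHash = 0xF0123456789ABCDEL;
        final int chunk = (int) (firstHash >>> chunkShift);
        System.out.println(chunk);                     // prints 15 (top four bits are 1111)
    }
}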

From source file:it.unimi.dsi.sux4j.mph.CHDMinimalPerfectHashFunction.java

/**
 * Creates a new CHD minimal perfect hash function for the given keys.
 *
 * @param keys the keys to hash, or {@code null}.
 * @param transform a transformation strategy for the keys.
 * @param lambda the average bucket size.
 * @param loadFactor the load factor.
 * @param signatureWidth a signature width, or 0 for no signature.
 * @param tempDir a temporary directory for the store files, or {@code null} for the standard temporary directory.
 * @param chunkedHashStore a chunked hash store containing the keys, or {@code null}; the store
 * can be unchecked, but in this case <code>keys</code> and <code>transform</code> must be non-{@code null}. 
 */
protected CHDMinimalPerfectHashFunction(final Iterable<? extends T> keys,
        final TransformationStrategy<? super T> transform, final int lambda, double loadFactor,
        final int signatureWidth, final File tempDir, ChunkedHashStore<T> chunkedHashStore) throws IOException {
    this.transform = transform;

    final ProgressLogger pl = new ProgressLogger(LOGGER);
    pl.displayLocalSpeed = true;
    pl.displayFreeMemory = true;
    final RandomGenerator r = new XorShift1024StarRandomGenerator();
    pl.itemsName = "keys";

    final boolean givenChunkedHashStore = chunkedHashStore != null;
    if (!givenChunkedHashStore) {
        chunkedHashStore = new ChunkedHashStore<T>(transform, tempDir, pl);
        chunkedHashStore.reset(r.nextLong());
        chunkedHashStore.addAll(keys.iterator());
    }
    n = chunkedHashStore.size();

    defRetValue = -1; // For the very few cases in which we can decide

    int log2NumChunks = Math.max(0, Fast.mostSignificantBit(n >> LOG2_CHUNK_SIZE));
    chunkShift = chunkedHashStore.log2Chunks(log2NumChunks);
    final int numChunks = 1 << log2NumChunks;

    LOGGER.debug("Number of chunks: " + numChunks);
    LOGGER.debug("Average chunk size: " + (double) n / numChunks);

    offsetNumBucketsSeed = new long[(numChunks + 1) * 3 + 2];

    int duplicates = 0;
    final LongArrayList holes = new LongArrayList();

    @SuppressWarnings("resource")
    final OfflineIterable<MutableLong, MutableLong> coefficients = new OfflineIterable<MutableLong, MutableLong>(
            new Serializer<MutableLong, MutableLong>() {

                @Override
                public void write(final MutableLong a, final DataOutput dos) throws IOException {
                    long x = a.longValue();
                    while ((x & ~0x7FL) != 0) {
                        dos.writeByte((int) (x | 0x80));
                        x >>>= 7;
                    }
                    dos.writeByte((int) x);
                }

                @Override
                public void read(final DataInput dis, final MutableLong x) throws IOException {
                    byte b = dis.readByte();
                    long t = b & 0x7F;
                    for (int shift = 7; (b & 0x80) != 0; shift += 7) {
                        b = dis.readByte();
                        t |= (b & 0x7FL) << shift;
                    }
                    x.setValue(t);
                }
            }, new MutableLong());

    for (;;) {
        LOGGER.debug("Generating minimal perfect hash function...");

        holes.clear();
        coefficients.clear();
        pl.expectedUpdates = numChunks;
        pl.itemsName = "chunks";
        pl.start("Analysing chunks... ");

        try {
            int chunkNumber = 0;

            for (ChunkedHashStore.Chunk chunk : chunkedHashStore) {
                /* We treat a chunk as a single hash function. The number of bins is thus
                 * the first prime larger than the chunk size divided by the load factor. */
                final int p = Primes.nextPrime((int) Math.ceil(chunk.size() / loadFactor) + 1);
                final boolean used[] = new boolean[p];

                final int numBuckets = (chunk.size() + lambda - 1) / lambda;
                numBuckets(chunkNumber + 1, numBuckets(chunkNumber) + numBuckets);
                final int[] cc0 = new int[numBuckets];
                final int[] cc1 = new int[numBuckets];
                @SuppressWarnings("unchecked")
                final ArrayList<long[]>[] bucket = new ArrayList[numBuckets];
                for (int i = bucket.length; i-- != 0;)
                    bucket[i] = new ArrayList<long[]>();

                tryChunk: for (;;) {
                    for (ArrayList<long[]> b : bucket)
                        b.clear();
                    Arrays.fill(used, false);

                    /* At each try, the allocation of keys to buckets is randomized differently. */
                    final long seed = r.nextLong();
                    // System.err.println( "Number of keys: " + chunk.size()  + " Number of bins: " + p + " seed: " + seed );
                    /* We distribute the keys in this chunk among the buckets. */
                    for (Iterator<long[]> iterator = chunk.iterator(); iterator.hasNext();) {
                        final long[] triple = iterator.next();
                        final long[] h = new long[3];
                        Hashes.spooky4(triple, seed, h);
                        final ArrayList<long[]> b = bucket[(int) ((h[0] >>> 1) % numBuckets)];
                        h[1] = (int) ((h[1] >>> 1) % p);
                        h[2] = (int) ((h[2] >>> 1) % (p - 1)) + 1;

                        // All elements in a bucket must have either different h[ 1 ] or different h[ 2 ]
                        for (long[] t : b)
                            if (t[1] == h[1] && t[2] == h[2]) {
                                LOGGER.info("Duplicate index" + Arrays.toString(t));
                                continue tryChunk;
                            }
                        b.add(h);
                    }

                    final int[] perm = Util.identity(bucket.length);
                    IntArrays.quickSort(perm, new AbstractIntComparator() {
                        private static final long serialVersionUID = 1L;

                        @Override
                        public int compare(int a0, int a1) {
                            return Integer.compare(bucket[a1].size(), bucket[a0].size());
                        }
                    });

                    for (int i = 0; i < perm.length;) {
                        final LinkedList<Integer> bucketsToDo = new LinkedList<Integer>();
                        final int size = bucket[perm[i]].size();
                        //System.err.println( "Bucket size: " + size );
                        int j;
                        // Gather indices of all buckets with the same size
                        for (j = i; j < perm.length && bucket[perm[j]].size() == size; j++)
                            bucketsToDo.add(Integer.valueOf(perm[j]));

                        // Examine for each pair (c0,c1) the buckets still to do
                        ext: for (int c1 = 0; c1 < p; c1++)
                            for (int c0 = 0; c0 < p; c0++) {
                                //System.err.println( "Testing " + c0 + ", " + c1 + " (to do: " + bucketsToDo.size() + ")" );
                                for (Iterator<Integer> iterator = bucketsToDo.iterator(); iterator.hasNext();) {
                                    final int k = iterator.next().intValue();
                                    final ArrayList<long[]> b = bucket[k];
                                    boolean completed = true;
                                    final IntArrayList done = new IntArrayList();
                                    // Try to see whether the necessary entries are not used
                                    for (long[] h : b) {
                                        //assert k == h[ 0 ];

                                        int pos = (int) ((h[1] + c0 * h[2] + c1) % p);
                                        //System.err.println( "Testing pos " + pos + " for " + Arrays.toString( e  ));
                                        if (used[pos]) {
                                            completed = false;
                                            break;
                                        } else {
                                            used[pos] = true;
                                            done.add(pos);
                                        }
                                    }

                                    if (completed) {
                                        // All positions were free
                                        cc0[k] = c0;
                                        cc1[k] = c1;
                                        iterator.remove();
                                    } else
                                        for (int d : done)
                                            used[d] = false;
                                }
                                if (bucketsToDo.isEmpty())
                                    break ext;
                            }
                        if (!bucketsToDo.isEmpty())
                            continue tryChunk;

                        seed(chunkNumber, seed);
                        i = j;
                    }
                    break;
                }

                // System.err.println("DONE!");

                if (ASSERTS) {
                    final IntOpenHashSet pos = new IntOpenHashSet();
                    final long h[] = new long[3];
                    for (Iterator<long[]> iterator = chunk.iterator(); iterator.hasNext();) {
                        final long[] triple = iterator.next();
                        Hashes.spooky4(triple, seed(chunkNumber), h);
                        h[0] = (h[0] >>> 1) % numBuckets;
                        h[1] = (int) ((h[1] >>> 1) % p);
                        h[2] = (int) ((h[2] >>> 1) % (p - 1)) + 1;
                        //System.err.println( Arrays.toString(  e  ) );
                        assert pos.add((int) ((h[1] + cc0[(int) (h[0])] * h[2] + cc1[(int) (h[0])]) % p));
                    }
                }

                final MutableLong l = new MutableLong();
                for (int i = 0; i < numBuckets; i++) {
                    l.setValue(cc0[i] + cc1[i] * p);
                    coefficients.add(l);
                }

                for (int i = 0; i < p; i++)
                    if (!used[i])
                        holes.add(offset(chunkNumber) + i);

                offset(chunkNumber + 1, offset(chunkNumber) + p);
                chunkNumber++;
                pl.update();
            }

            pl.done();
            break;
        } catch (ChunkedHashStore.DuplicateException e) {
            if (keys == null)
                throw new IllegalStateException(
                        "You provided no keys, but the chunked hash store was not checked");
            if (duplicates++ > 3)
                throw new IllegalArgumentException("The input list contains duplicates");
            LOGGER.warn("Found duplicate. Recomputing triples...");
            chunkedHashStore.reset(r.nextLong());
            chunkedHashStore.addAll(keys.iterator());
        }
    }

    rank = new SparseRank(offset(offsetNumBucketsSeed.length / 3 - 1), holes.size(), holes.iterator());

    globalSeed = chunkedHashStore.seed();

    this.coefficients = new EliasFanoLongBigList(new AbstractLongIterator() {
        final OfflineIterator<MutableLong, MutableLong> iterator = coefficients.iterator();

        @Override
        public boolean hasNext() {
            return iterator.hasNext();
        }

        public long nextLong() {
            return iterator.next().longValue();
        }
    }, 0, true);

    coefficients.close();

    LOGGER.info("Completed.");
    LOGGER.info("Actual bit cost per key: " + (double) numBits() / n);

    if (signatureWidth != 0) {
        signatureMask = -1L >>> Long.SIZE - signatureWidth;
        (signatures = LongArrayBitVector.getInstance().asLongBigList(signatureWidth)).size(n);
        pl.expectedUpdates = n;
        pl.itemsName = "signatures";
        pl.start("Signing...");
        for (ChunkedHashStore.Chunk chunk : chunkedHashStore) {
            Iterator<long[]> iterator = chunk.iterator();
            for (int i = chunk.size(); i-- != 0;) {
                final long[] triple = iterator.next();
                long t = getLongByTripleNoCheck(triple);
                signatures.set(t, signatureMask & triple[0]);
                pl.lightUpdate();
            }
        }
        pl.done();
    } else {
        signatureMask = 0;
        signatures = null;
    }

    if (!givenChunkedHashStore)
        chunkedHashStore.close();
}

From source file:it.unimi.dsi.sux4j.mph.CHDMinimalPerfectHashFunction.java

/**
 * Returns the number of bits used by this structure.
 *
 * @return the number of bits used by this structure.
 */
public long numBits() {
    return offsetNumBucketsSeed.length * Long.SIZE + coefficients.numBits() + rank.numBits();
}

From source file:it.unimi.dsi.sux4j.mph.CHDMinimalPerfectHashFunction.java

@SuppressWarnings("unchecked")
public long getLong(final Object key) {
    if (n == 0)
        return defRetValue;
    final long[] triple = new long[3];
    Hashes.spooky4(transform.toBitVector((T) key), globalSeed, triple);
    final int chunk = chunkShift == Long.SIZE ? 0 : (int) (triple[0] >>> chunkShift);
    final int index = chunk * 3;
    final long[] offsetNumBucketsSeed = this.offsetNumBucketsSeed;
    final long chunkOffset = offsetNumBucketsSeed[index];
    final int p = (int) (offsetNumBucketsSeed[index + 3] - chunkOffset);

    final long[] h = new long[3];
    Hashes.spooky4(triple, offsetNumBucketsSeed[index + 2], h);
    h[1] = (int) ((h[1] >>> 1) % p);
    h[2] = (int) ((h[2] >>> 1) % (p - 1)) + 1;

    final long numBuckets = offsetNumBucketsSeed[index + 1];
    final long c = coefficients
            .getLong(numBuckets + (h[0] >>> 1) % (offsetNumBucketsSeed[index + 4] - numBuckets));

    long result = chunkOffset + (int) ((h[1] + (c % p) * h[2] + c / p) % p);
    result -= rank.rank(result);

    if (signatureMask != 0)
        return result >= n || ((signatures.getLong(result) ^ triple[0]) & signatureMask) != 0 ? defRetValue
                : result;
    // Out-of-set strings can generate bizarre 3-hyperedges.
    return result < n ? result : defRetValue;
}

From source file:it.unimi.dsi.sux4j.mph.GOV4Function.java

/** Creates a new function for the given keys and values.
 *
 * @param keys the keys in the domain of the function, or {@code null}.
 * @param transform a transformation strategy for the keys.
 * @param signatureWidth a positive number for a signature width, 0 for no signature, a negative value for a self-signed function; if nonzero, {@code values} must be {@code null} and {@code width} must be -1.
 * @param values values to be assigned to each element, in the same order of the iterator returned by <code>keys</code>; if {@code null}, the
 * assigned value will be the ordinal number of each element.
 * @param dataWidth the bit width of the <code>values</code>, or -1 if <code>values</code> is {@code null}.
 * @param tempDir a temporary directory for the store files, or {@code null} for the standard temporary directory.
 * @param chunkedHashStore a chunked hash store containing the keys associated with their ranks (if there are no values, or {@code indirect} is true)
 * or values, or {@code null}; the store
 * can be unchecked, but in this case <code>keys</code> and <code>transform</code> must be non-{@code null}. 
 * @param indirect if true, <code>chunkedHashStore</code> contains ordinal positions, and <code>values</code> is a {@link LongIterable} that
 * must be accessed to retrieve the actual values. 
 */
protected GOV4Function(final Iterable<? extends T> keys, final TransformationStrategy<? super T> transform,
        int signatureWidth, final LongIterable values, final int dataWidth, final File tempDir,
        ChunkedHashStore<T> chunkedHashStore, final boolean indirect) throws IOException {
    this.transform = transform;

    if (signatureWidth != 0 && values != null)
        throw new IllegalArgumentException("You cannot sign a function if you specify its values");
    if (signatureWidth != 0 && dataWidth != -1)
        throw new IllegalArgumentException("You cannot specify a signature width and a data width");

    final ProgressLogger pl = new ProgressLogger(LOGGER);
    pl.displayLocalSpeed = true;
    pl.displayFreeMemory = true;
    final RandomGenerator r = new XorShift1024StarRandomGenerator();
    pl.itemsName = "keys";

    final boolean givenChunkedHashStore = chunkedHashStore != null;
    if (!givenChunkedHashStore) {
        if (keys == null)
            throw new IllegalArgumentException(
                    "If you do not provide a chunked hash store, you must provide the keys");
        chunkedHashStore = new ChunkedHashStore<T>(transform, tempDir, -Math.min(signatureWidth, 0), pl);
        chunkedHashStore.reset(r.nextLong());
        if (values == null || indirect)
            chunkedHashStore.addAll(keys.iterator());
        else
            chunkedHashStore.addAll(keys.iterator(), values != null ? values.iterator() : null);
    }
    n = chunkedHashStore.size();
    defRetValue = signatureWidth < 0 ? 0 : -1; // Self-signed maps get zero as default return value.

    if (n == 0) {
        m = this.globalSeed = chunkShift = this.width = 0;
        data = null;
        offsetAndSeed = null;
        signatureMask = 0;
        signatures = null;
        return;
    }

    int log2NumChunks = Math.max(0, Fast.mostSignificantBit(n >> LOG2_CHUNK_SIZE));
    chunkShift = chunkedHashStore.log2Chunks(log2NumChunks);
    final int numChunks = 1 << log2NumChunks;

    LOGGER.debug("Number of chunks: " + numChunks);

    offsetAndSeed = new long[numChunks + 1];

    this.width = signatureWidth < 0 ? -signatureWidth : dataWidth == -1 ? Fast.ceilLog2(n) : dataWidth;

    // Candidate data; might be discarded for compaction.
    @SuppressWarnings("resource")
    final OfflineIterable<BitVector, LongArrayBitVector> offlineData = new OfflineIterable<BitVector, LongArrayBitVector>(
            BitVectors.OFFLINE_SERIALIZER, LongArrayBitVector.getInstance());

    int duplicates = 0;

    for (;;) {
        LOGGER.debug("Generating GOV function with " + this.width + " output bits...");

        pl.expectedUpdates = numChunks;
        pl.itemsName = "chunks";
        pl.start("Analysing chunks... ");

        try {
            int q = 0;
            final LongArrayBitVector dataBitVector = LongArrayBitVector.getInstance();
            final LongBigList data = dataBitVector.asLongBigList(this.width);
            long unsolvable = 0;
            for (final ChunkedHashStore.Chunk chunk : chunkedHashStore) {

                offsetAndSeed[q + 1] = offsetAndSeed[q]
                        + Math.max((C_TIMES_256 * chunk.size() >>> 8), chunk.size() + 1);

                long seed = 0;
                final int v = (int) (offsetAndSeed[q + 1] - offsetAndSeed[q]);
                final Linear4SystemSolver<BitVector> solver = new Linear4SystemSolver<BitVector>(v,
                        chunk.size());

                for (;;) {
                    final boolean solved = solver.generateAndSolve(chunk, seed, new AbstractLongBigList() {
                        private final LongBigList valueList = indirect
                                ? (values instanceof LongList ? LongBigLists.asBigList((LongList) values)
                                        : (LongBigList) values)
                                : null;

                        @Override
                        public long size64() {
                            return chunk.size();
                        }

                        @Override
                        public long getLong(final long index) {
                            return indirect ? valueList.getLong(chunk.data(index)) : chunk.data(index);
                        }
                    });
                    unsolvable += solver.unsolvable;
                    if (solved)
                        break;
                    seed += SEED_STEP;
                    if (seed == 0)
                        throw new AssertionError("Exhausted local seeds");
                }

                this.offsetAndSeed[q] |= seed;

                dataBitVector.fill(false);
                data.size(v);
                q++;

                /* We assign values. */
                final long[] solution = solver.solution;
                for (int i = 0; i < solution.length; i++)
                    data.set(i, solution[i]);

                offlineData.add(dataBitVector);
                pl.update();
            }

            LOGGER.info("Unsolvable systems: " + unsolvable + "/" + numChunks + " ("
                    + Util.format(100.0 * unsolvable / numChunks) + "%)");

            pl.done();
            break;
        } catch (ChunkedHashStore.DuplicateException e) {
            if (keys == null)
                throw new IllegalStateException(
                        "You provided no keys, but the chunked hash store was not checked");
            if (duplicates++ > 3)
                throw new IllegalArgumentException("The input list contains duplicates");
            LOGGER.warn("Found duplicate. Recomputing triples...");
            chunkedHashStore.reset(r.nextLong());
            pl.itemsName = "keys";
            if (values == null || indirect)
                chunkedHashStore.addAll(keys.iterator());
            else
                chunkedHashStore.addAll(keys.iterator(), values != null ? values.iterator() : null);
        }
    }

    if (DEBUG)
        System.out.println("Offsets: " + Arrays.toString(offsetAndSeed));

    globalSeed = chunkedHashStore.seed();
    m = offsetAndSeed[offsetAndSeed.length - 1];
    final LongArrayBitVector dataBitVector = LongArrayBitVector.getInstance(m * this.width);
    this.data = dataBitVector.asLongBigList(this.width);

    OfflineIterator<BitVector, LongArrayBitVector> iterator = offlineData.iterator();
    while (iterator.hasNext())
        dataBitVector.append(iterator.next());
    iterator.close();

    offlineData.close();

    LOGGER.info("Completed.");
    LOGGER.info("Forecast bit cost per element: " + C * this.width);
    LOGGER.info("Actual bit cost per element: " + (double) numBits() / n);

    if (signatureWidth > 0) {
        signatureMask = -1L >>> Long.SIZE - signatureWidth;
        signatures = chunkedHashStore.signatures(signatureWidth, pl);
    } else if (signatureWidth < 0) {
        signatureMask = -1L >>> Long.SIZE + signatureWidth;
        signatures = null;
    } else {
        signatureMask = 0;
        signatures = null;
    }

    if (!givenChunkedHashStore)
        chunkedHashStore.close();
}

From source file:it.unimi.dsi.sux4j.mph.GOV4Function.java

@SuppressWarnings("unchecked")
public long getLong(final Object o) {
    if (n == 0)
        return defRetValue;
    final int[] e = new int[4];
    final long[] h = new long[3];
    Hashes.spooky4(transform.toBitVector((T) o), globalSeed, h);
    final int chunk = chunkShift == Long.SIZE ? 0 : (int) (h[0] >>> chunkShift);
    final long chunkOffset = offsetAndSeed[chunk] & OFFSET_MASK;
    Linear4SystemSolver.tripleToEquation(h, offsetAndSeed[chunk] & ~OFFSET_MASK,
            (int) ((offsetAndSeed[chunk + 1] & OFFSET_MASK) - chunkOffset), e);
    if (e[0] == -1)
        return defRetValue;
    final long e0 = e[0] + chunkOffset, e1 = e[1] + chunkOffset, e2 = e[2] + chunkOffset,
            e3 = e[3] + chunkOffset;

    final long result = data.getLong(e0) ^ data.getLong(e1) ^ data.getLong(e2) ^ data.getLong(e3);
    if (signatureMask == 0)
        return result;
    if (signatures != null)
        return result >= n || ((signatures.getLong(result) ^ h[0]) & signatureMask) != 0 ? defRetValue : result;
    else
        return ((result ^ h[0]) & signatureMask) != 0 ? defRetValue : 1;
}