Example usage for com.google.common.primitives Longs toByteArray

Introduction

On this page you can find example usage for com.google.common.primitives Longs toByteArray.

Prototype

public static byte[] toByteArray(long value) 

Document

Returns a big-endian representation of value in an 8-element byte array; equivalent to ByteBuffer.allocate(8).putLong(value).array().
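
As a quick, self-contained sketch (assuming only Guava on the classpath), the following confirms the ByteBuffer equivalence stated above and round-trips the value with Longs.fromByteArray:

import com.google.common.primitives.Longs;

import java.nio.ByteBuffer;
import java.util.Arrays;

public class LongsToByteArrayDemo {
    public static void main(String[] args) {
        long value = 0x0102030405060708L;

        // Big-endian: most significant byte first.
        byte[] bytes = Longs.toByteArray(value); // {1, 2, 3, 4, 5, 6, 7, 8}

        // The equivalent ByteBuffer-based encoding from the description above.
        byte[] viaBuffer = ByteBuffer.allocate(8).putLong(value).array();
        System.out.println(Arrays.equals(bytes, viaBuffer)); // true

        // Round trip back to the original long.
        System.out.println(Longs.fromByteArray(bytes) == value); // true
    }
}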

Usage

From source file:se.sics.datamodel.util.DMKeyFactory.java
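
This helper builds a sortable key component: a type tag and a sign byte are written first, followed by the big-endian magnitude bytes from Longs.toByteArray.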

private static byte[] serializeLexico(long val) throws IOException {
    Closer closer = Closer.create();
    try {
        ByteArrayOutputStream baos = closer.register(new ByteArrayOutputStream());
        DataOutputStream w = closer.register(new DataOutputStream(baos));

        byte sign = (val < 0 ? (byte) 0 : (byte) 1);
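        // caveat: Math.abs(Long.MIN_VALUE) overflows and remains negative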
        long absVal = Math.abs(val);
        byte[] iVal = Longs.toByteArray(absVal);

        w.write(INDEXVAL_LONG);
        w.write(sign);
        w.write(iVal);

        w.flush();

        return baos.toByteArray();
    } catch (Throwable e) {
        throw closer.rethrow(e);
    } finally {
        closer.close();
    }
}

From source file:parquet.avro.AvroWriteSupport.java
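
Longs.toByteArray yields the big-endian nanoseconds-of-day, which ArrayUtils.reverse then flips into the little-endian layout required by the 12-byte Impala timestamp.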

private void writeImpalaTimestamp(RecordConsumer recordConsumer, Long nanoseconds) {

    long milliseconds = nanoseconds / NANO_MILLI_CONV;
    JDateTime jdt = new JDateTime(milliseconds);

    // Get nanoseconds of the day and get bytes (little-endian format)
    long seconds_of_day = jdt.getMillisOfDay() / MILLI_SECOND_CONV;
    long nanoseconds_fraction_of_second = nanoseconds % NANO_CONV;
    long nanoseconds_of_day = seconds_of_day * NANO_CONV + nanoseconds_fraction_of_second;
    byte[] b_ns = Longs.toByteArray(nanoseconds_of_day);
    ArrayUtils.reverse(b_ns);

    // Get Julian Day and get bytes (little-endian format)
    byte[] b_julian_days = Ints.toByteArray(jdt.getJulianDayNumber());
    ArrayUtils.reverse(b_julian_days);

    // Fill buffer
    ByteBuffer buf = ByteBuffer.allocate(12);
    buf.put(b_ns).put(b_julian_days).flip();

    // Write
    recordConsumer.addBinary(Binary.fromByteBuffer(buf));
}

From source file:co.paralleluniverse.galaxy.berkeleydb.BerkeleyDB.java
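
The last id of the allocated range (start + num - 1) is turned into a fixed-width BerkeleyDB key with Longs.toByteArray; the owner becomes the value via Shorts.toByteArray.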

@Override
public void allocate(short owner, long start, int num) {
    final DatabaseEntry key = new DatabaseEntry(Longs.toByteArray(start + num - 1));
    final DatabaseEntry value = new DatabaseEntry(Shorts.toByteArray(owner));

    final Transaction txn = null; // env.beginTransaction(null, null);
    try {
        OperationStatus status = allocationDirectory.putNoOverwrite(txn, key, value);
        if (status != OperationStatus.SUCCESS) {
            LOG.debug("Bad status: {}", status);
            throw new AssertionError();
        }
        if (txn != null)
            txn.commit();
    } catch (Exception e) {
        LOG.error("Exception during DB operation. Aborting transaction.", e);
        if (txn != null)
            txn.abort();
        throw Throwables.propagate(e);
    }
}

From source file:dti.tdl.messaging.TDLMessageHandler.java
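
A CRC32 checksum of the payload is converted to eight bytes with Longs.toByteArray and embedded in every frame between the header fields and the message block.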

public static void SimFraming(TDLMessage message) {
    byte[] start = { (byte) 1 };
    byte[] startMsg = { (byte) 2 };
    byte[] endMsg = { (byte) 3 };
    byte[] end = { (byte) 4 };
    byte[] profile = message.getProfileId().substring(0, 4).getBytes(); //4 bytes profile id
    byte[] msgType = { message.getMsgType() }; // 1 byte message type
    byte[] from = hexStringToByteArray(message.getFromId());
    byte[] to = hexStringToByteArray(message.getToId());
    byte[] data = message.getMsg();
    int numBlk = 1;

    if (data.length > TDLMessageHandler.messageMaxBytes) {
        numBlk = (int) (data.length / TDLMessageHandler.messageMaxBytes);
        if (data.length % TDLMessageHandler.messageMaxBytes > 0) {
            numBlk++;
        }

    }
    //String utf8msg = new String()
    long checksum = CRC32Checksum(data);

    byte[] checksumBytes = Longs.toByteArray(checksum);

    byte[] frame = null;
    int msgLength = data.length;
    int msgIdx = 0;
    for (int i = 0; i < numBlk; i++) {
        if (msgLength > TDLMessageHandler.messageMaxBytes) {
            msgLength = (int) (msgLength - TDLMessageHandler.messageMaxBytes);
        }
        int msgBlkLength = (int) TDLMessageHandler.messageMaxBytes;
        if (i == numBlk - 1) {
            msgBlkLength = msgLength;
        }
        msgIdx = (int) (TDLMessageHandler.messageMaxBytes * i);
        frame = new byte[start.length + 1 + from.length + to.length + profile.length + checksumBytes.length
                + startMsg.length + msgBlkLength + endMsg.length + end.length];
        System.arraycopy(start, 0, frame, 0, 1);
        System.arraycopy(msgType, 0, frame, 1, 1);
        System.arraycopy(from, 0, frame, 2, from.length);
        System.arraycopy(to, 0, frame, 2 + from.length, to.length);
        System.arraycopy(profile, 0, frame, 2 + from.length + to.length, profile.length);
        System.arraycopy(checksumBytes, 0, frame, 2 + from.length + to.length + profile.length,
                checksumBytes.length);
        System.arraycopy(startMsg, 0, frame,
                2 + from.length + to.length + profile.length + checksumBytes.length, startMsg.length);
        System.arraycopy(Arrays.copyOfRange(data, msgIdx, msgIdx + msgBlkLength), 0, frame,
                2 + from.length + to.length + profile.length + checksumBytes.length + startMsg.length,
                msgBlkLength);
        System.arraycopy(endMsg, 0, frame, 2 + from.length + to.length + profile.length + checksumBytes.length
                + startMsg.length + msgBlkLength, endMsg.length);
        System.arraycopy(end, 0, frame, 2 + from.length + to.length + profile.length + checksumBytes.length
                + startMsg.length + msgBlkLength + endMsg.length, end.length);

        String txMsg = null;
        StringBuilder builder = new StringBuilder();

        for (int j = 0; j < frame.length; j++) {
            if (j < frame.length - 1) {
                builder.append((int) frame[j] + ",");
            } else {
                builder.append((int) frame[j]);
            }
        }
        txMsg = builder.toString();

        String[] txBytesStrArray = txMsg.split(",");
        byte[] txBytes = new byte[txBytesStrArray.length];
        //String frameMsg = "Sim Frame content (before): ";
        for (int j = 0; j < txBytesStrArray.length; j++) {
            int byteInt = Integer.parseInt(txBytesStrArray[j]);
            //frameMsg = frameMsg+" "+byteInt;
            txBytes[j] = (byte) byteInt;
        }
        //System.out.println(frameMsg);
        TDLMessageHandler.deFraming(txBytes);

    }
}

From source file:codes.writeonce.maven.plugins.soy.CompileMojo.java
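
File size and last-modified time are fed into a MessageDigest as byte arrays, so the digest changes whenever a source file does.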

private static void updateDigest(Path root, MessageDigest sourceDigest, Path soyFilePath) throws IOException {
    sourceDigest.update(soyFilePath.toString().getBytes(TEXT_DIGEST_CHARSET));
    final Path soyFile = root.resolve(soyFilePath);
    sourceDigest.update(Longs.toByteArray(Files.size(soyFile)));
    sourceDigest.update(Longs.toByteArray(Files.getLastModifiedTime(soyFile).toMillis()));
}

From source file:io.warp10.continuum.geo.GeoIndex.java
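
Each index cell is written to the dump file as the eight big-endian bytes returned by Longs.toByteArray.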

/**
 * Store the LKP index into a file.
 */
public void dumpLKPIndex(File path) throws IOException {

    if (0 != this.depth) {
        return;
    }

    Set<String> gtsKeys = new HashSet<String>();

    synchronized (this.lkpIndex) {
        gtsKeys.addAll(this.lkpIndex.keySet());
    }

    OutputStream out = new FileOutputStream(path);

    try {
        for (String key : gtsKeys) {
            long[] cells = this.lkpIndex.get(key);

            if (null == cells) {
                continue;
            }

            byte[] id = key.getBytes(Charsets.UTF_8);

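            // note: OutputStream.write(int) emits only the low-order byte,
            // so id.length and cells.length must each fit in a single byte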
            out.write(id.length);
            out.write(id);

            out.write(cells.length);

            for (long cell : cells) {
                out.write(Longs.toByteArray(cell));
            }
        }
    } finally {
        out.close();
    }
}

From source file:com.datatorrent.contrib.dimensions.DimensionsStoreHDHT.java
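
The event timestamp is serialized first with Longs.toByteArray, giving every HDHT key a fixed-width, big-endian time prefix.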

/**
 * Method serializes the given {@link EventKey}.
 *
 * @param eventKey The {@link EventKey} to serialize.
 * @return The serialized {@link EventKey}.
 */
public synchronized byte[] getEventKeyBytesGAE(EventKey eventKey) {
    long timestamp = 0;

    if (eventKey.getKey().getFieldDescriptor().getFieldList().contains(DimensionsDescriptor.DIMENSION_TIME)) {
        //If key includes a time stamp retrieve it.
        timestamp = eventKey.getKey().getFieldLong(DimensionsDescriptor.DIMENSION_TIME);
    }

    //Time is a special case for HDHT all keys should be prefixed by a timestamp.
    byte[] timeBytes = Longs.toByteArray(timestamp);
    byte[] schemaIDBytes = Ints.toByteArray(eventKey.getSchemaID());
    byte[] dimensionDescriptorIDBytes = Ints.toByteArray(eventKey.getDimensionDescriptorID());
    byte[] aggregatorIDBytes = Ints.toByteArray(eventKey.getAggregatorID());
    byte[] gpoBytes = GPOUtils.serialize(eventKey.getKey(), tempBal);

    bal.add(timeBytes);
    bal.add(schemaIDBytes);
    bal.add(dimensionDescriptorIDBytes);
    bal.add(aggregatorIDBytes);
    bal.add(gpoBytes);

    byte[] serializedBytes = bal.toByteArray();
    bal.clear();

    return serializedBytes;
}

From source file:org.apache.beam.sdk.io.gcp.spanner.OrderedCode.java
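
Longs.toByteArray supplies the network-byte-order body of a variable-length, order-preserving encoding of a signed long; the sign-extended prefix and header bits are applied on top of it.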

/**
 * @see #readSignedNumIncreasing()
 */
public void writeSignedNumIncreasing(long val) {
    long x = val < 0 ? ~val : val;
    if (x < 64) { // Fast path for encoding length == 1.
        byte[] encodedArray = new byte[] { (byte) (LENGTH_TO_HEADER_BITS[1][0] ^ val) };
        encodedArrays.add(encodedArray);
        return;
    }
    // buf = val in network byte order, sign extended to 10 bytes.
    byte signByte = val < 0 ? (byte) 0xff : 0;
    byte[] buf = new byte[2 + Longs.BYTES];
    buf[0] = buf[1] = signByte;
    System.arraycopy(Longs.toByteArray(val), 0, buf, 2, Longs.BYTES);
    int len = getSignedEncodingLength(x);
    if (len < 2) {
        throw new IllegalStateException(
                "Invalid length (" + len + ")" + " returned by getSignedEncodingLength(" + x + ")");
    }
    int beginIndex = buf.length - len;
    buf[beginIndex] ^= LENGTH_TO_HEADER_BITS[len][0];
    buf[beginIndex + 1] ^= LENGTH_TO_HEADER_BITS[len][1];

    byte[] encodedArray = new byte[len];
    System.arraycopy(buf, beginIndex, encodedArray, 0, len);
    encodedArrays.add(encodedArray);
}

From source file:org.opendedup.sdfs.replication.MetaFileImport.java
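
A chunk position returned by HCServiceProxy.hashExists is stored back into the fingerprint entry as a byte-array hash location.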

private void checkDedupFile(File metaFile) throws IOException, ReplicationCanceledException {
    if (this.closed)
        throw new ReplicationCanceledException("MetaFile Import Canceled");
    MetaDataDedupFile mf = MetaDataDedupFile.getFile(metaFile.getPath());
    ArrayList<HashLocPair> bh = new ArrayList<HashLocPair>(MAX_BATCHHASH_SIZE);
    mf.getIOMonitor().clearFileCounters(true);
    String dfGuid = mf.getDfGuid();
    if (dfGuid != null) {
        LongByteArrayMap mp = LongByteArrayMap.getMap(dfGuid);
        try {
            SparseDataChunk ck = new SparseDataChunk();
            long prevpos = 0;
            mp.iterInit();
            while (ck != null) {
                if (this.closed)
                    throw new ReplicationCanceledException("MetaFile Import Canceled");
                if (this.lastException != null)
                    throw this.lastException;
                levt.curCt += (mp.getIterPos() - prevpos);
                prevpos = mp.getIterPos();
                ck = mp.nextValue(this.firstrun);
                if (ck != null) {
                    ck.setFpos((prevpos / mp.getFree().length) * Main.CHUNK_LENGTH);
                    List<HashLocPair> al = ck.getFingers();

                    if (Main.chunkStoreLocal) {
                        mf.getIOMonitor().addVirtualBytesWritten(Main.CHUNK_LENGTH, true);
                        // Todo : Must fix how this is counted
                        if (HashFunctionPool.max_hash_cluster > 1)
                            mf.getIOMonitor().addDulicateData(Main.CHUNK_LENGTH, true);
                        boolean hpc = false;
                        for (HashLocPair p : al) {
                            long pos = 0;
                            if (Main.refCount && Arrays.areEqual(WritableCacheBuffer.bk, p.hash))
                                pos = 0;
                            else
                                pos = HCServiceProxy.hashExists(p.hash, false);
                            boolean exists = false;
                            if (pos != -1) {
                                p.hashloc = Longs.toByteArray(pos);
                                hpc = true;
                                exists = true;
                            }
                            if (!exists) {
                                hashes.add(p.hash);
                                entries++;
                                passEntries++;
                                levt.blocksImported = entries;
                            } else {
                                if (HashFunctionPool.max_hash_cluster == 1)
                                    mf.getIOMonitor().addDulicateData(Main.CHUNK_LENGTH, true);
                            }
                            if (hashes.size() >= MAX_SZ) {
                                executor.execute(new DataImporter(this, hashes));
                                hashes = new ArrayList<byte[]>();
                            }
                        }
                        if (hpc) {
                            mp.put(ck.getFpos(), ck);
                        }
                    } else {
                        bh.addAll(ck.getFingers());
                        if (bh.size() >= MAX_BATCHHASH_SIZE) {
                            boolean cp = batchCheck(bh, mf);
                            if (cp)
                                corruption = true;
                            bh = new ArrayList<HashLocPair>(MAX_BATCHHASH_SIZE);
                        }

                    }
                }

            }
            if (bh.size() > 0) {
                boolean cp = batchCheck(bh, mf);
                if (cp)
                    corruption = true;
            }
            Main.volume.updateCurrentSize(mf.length(), true);
            if (corruption) {
                MetaFileStore.removeMetaFile(mf.getPath(), true);
                throw new IOException(
                        "Unable to continue MetaFile Import because there are too many missing blocks");
            }
            mf.setDirty(true);
            mf.sync();
            //mf.getDedupFile(false).forceRemoteSync();
            Main.volume.addFile();
        } catch (Throwable e) {
            SDFSLogger.getLog().warn("error while checking file [" + dfGuid + "]", e);
            levt.endEvent("error while checking file [" + dfGuid + "]", SDFSEvent.WARN, e);
            throw new IOException(e);
        } finally {
            mp.close();
            mp = null;
            if (this.firstrun) {
                this.virtualBytesTransmitted.addAndGet(mf.length());
                levt.virtualDataImported = this.virtualBytesTransmitted.get();
            }
        }
    }
    this.filesProcessed++;
    if (this.firstrun)
        levt.filesImported = this.filesProcessed;
}

From source file:co.paralleluniverse.galaxy.berkeleydb.BerkeleyDB.java
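
As in the allocate example above, Longs.toByteArray turns the record id into the BerkeleyDB key under which the versioned entry is stored.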

@Override
public void write(long id, short owner, long version, byte[] data, Object txn) {
    if (LOG.isDebugEnabled())
        LOG.debug("WRITE " + hex(id) + " ver: " + version + " data: "
                + (data != null ? "(" + data.length + " bytes)" : "null"));

    final DatabaseEntry key = new DatabaseEntry(Longs.toByteArray(id));
    final DatabaseEntry dbEntry = new DatabaseEntry();
    entryBinding.objectToEntry(new MainMemoryEntry(version, data), dbEntry);

    mainStore.put((Transaction) txn, key, dbEntry);
    // try to write owner, but only if nonexistent (i.e will happen at first put only)
    ownerDirectory.putNoOverwrite((Transaction) txn, key, new DatabaseEntry(Shorts.toByteArray(owner)));
}