Example usage for java.nio ByteBuffer getLong

Introduction

This page lists example usages of java.nio.ByteBuffer.getLong().

Prototype

public abstract long getLong();

Document

Reads the next eight bytes at the buffer's current position, composes them into a long value according to the buffer's current byte order, and then advances the position by eight.
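
Below is a minimal, self-contained sketch of that behavior (the class name and values are illustrative only): each call to getLong() consumes eight bytes and advances the buffer's position by eight.

import java.nio.ByteBuffer;

public class GetLongDemo {
    public static void main(String[] args) {
        // Write two long values, then flip the buffer for reading.
        ByteBuffer buf = ByteBuffer.allocate(16);
        buf.putLong(42L);
        buf.putLong(-1L);
        buf.flip();

        System.out.println(buf.position()); // 0
        System.out.println(buf.getLong());  // 42
        System.out.println(buf.position()); // 8 -- advanced by eight bytes
        System.out.println(buf.getLong());  // -1
        System.out.println(buf.position()); // 16
    }
}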

Usage

From source file:loci.formats.in.KLBReader.java

private long readUInt64() throws IOException {
    byte[] b = new byte[8];
    in.read(b, 0, 8);
    ByteBuffer bb = ByteBuffer.wrap(b).order(ByteOrder.LITTLE_ENDIAN);
    return bb.getLong();
}
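
Note that getLong() honors the buffer's byte order, which is why the snippet above sets ByteOrder.LITTLE_ENDIAN before reading the header field. A small illustration of the difference (values chosen only for demonstration):

ByteBuffer le = ByteBuffer.wrap(new byte[] { 1, 0, 0, 0, 0, 0, 0, 0 }).order(ByteOrder.LITTLE_ENDIAN);
ByteBuffer be = ByteBuffer.wrap(new byte[] { 1, 0, 0, 0, 0, 0, 0, 0 }); // BIG_ENDIAN is the default
System.out.println(le.getLong()); // 1
System.out.println(be.getLong()); // 72057594037927936 (0x0100000000000000)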

From source file:org.cloudgraph.hbase.graph.DefaultAssembler.java

protected UUID fetchUUID(PlasmaType type, long targetSequence, EdgeReader edgeReader, RowReader rowReader)
        throws IOException {
    byte[] uuidValue = this.getMetaDataColumnValue(type, targetSequence, EntityMetaKey.UUID,
            rowReader.getTableReader().getTableConfig(), rowReader);
    String uuidStr = null;
    uuidStr = new String(uuidValue, rowReader.getTableReader().getTableConfig().getCharset());
    UUID uuid = null;
    if (uuidStr.length() == 22) {
        byte[] bytes = Base64.decodeBase64(uuidStr);
        ByteBuffer bb = ByteBuffer.wrap(bytes);
        uuid = new UUID(bb.getLong(), bb.getLong());
    } else {
        uuid = UUID.fromString(uuidStr);
    }
    return uuid;
}
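
The 22-character branch above handles UUIDs stored as unpadded Base64 of the 16 raw bytes (16 bytes encode to exactly 22 Base64 characters without padding); decoding and calling getLong() twice recovers the most- and least-significant halves. A hypothetical round trip (URL-safe variant shown), assuming the same commons-codec Base64 class used above:

UUID original = UUID.randomUUID();
ByteBuffer enc = ByteBuffer.allocate(16);
enc.putLong(original.getMostSignificantBits());
enc.putLong(original.getLeastSignificantBits());
String encoded = Base64.encodeBase64URLSafeString(enc.array()); // 22 characters, no padding

ByteBuffer dec = ByteBuffer.wrap(Base64.decodeBase64(encoded));
UUID roundTripped = new UUID(dec.getLong(), dec.getLong()); // equals original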

From source file:org.cloudgraph.hbase.graph.DefaultAssembler.java

protected UUID fetchRootUUID(TableReader childTableReader, GraphColumnKeyFactory keyFactory, PlasmaType subType,
        CellValues childResult) {

    byte[] uuidQual = keyFactory.createColumnKey(subType, EntityMetaKey.UUID);
    byte[] rootUuid = childResult
            .getColumnValue(childTableReader.getTableConfig().getDataColumnFamilyNameBytes(), uuidQual);
    if (rootUuid == null)
        throw new GraphServiceException("expected column: " + Bytes.toString(uuidQual) + " for row '"
                + childResult.getRowKey() + "' in table: " + childTableReader.getTableConfig().getName());
    String uuidStr = new String(rootUuid, childTableReader.getTableConfig().getCharset());
    UUID uuid = null;
    if (uuidStr.length() == 22) {
        byte[] bytes = Base64.decodeBase64(uuidStr);
        ByteBuffer bb = ByteBuffer.wrap(bytes);
        uuid = new UUID(bb.getLong(), bb.getLong());
    } else {
        uuid = UUID.fromString(uuidStr);
    }
    return uuid;
}

From source file:eu.ensure.packproc.ip.PackageProcessor.java

/**
 * Generic entry to the information package-processor.
 * <p>
 * Will route to more specific actions based on the plugin-specific configuration.
 * <p>
 * @param name - name of entity (information package)
 * @param inputStream - input stream onto information package
 * @param outputStream - [optionally] output stream onto (new) information package
 * @param context - a context for this processor
 * @throws IOException - if file I/O fails
 * @throws ArchiveException - if information package has unknown packaging format
 * @throws ProcessorException - if processing of information package fails
 * @throws ClassNotFoundException - if action not found
 */
public void process(String name, InputStream inputStream, OutputStream outputStream, ProcessorContext context)
        throws IOException, ArchiveException, ProcessorException, ClassNotFoundException {

    BasicProcessorContext basicContext = context.push(new BasicProcessorContext(name));
    boolean isMutableCall = null != outputStream;

    ArchiveInputStream archiveInputStream = null;
    PackageOutputStream archiveOutputStream = null;
    try {
        // Package readers and writers
        archiveInputStream = factory.createArchiveInputStream(new BufferedInputStream(inputStream));

        if (isMutableCall) {
            archiveOutputStream = PackageOutputStream.createOutputStreamFrom(archiveInputStream, outputStream);
        }

        // Iterate through objects in the input package
        ArchiveEntry archiveEntry = null;

        with_next_entry: while ((archiveEntry = archiveInputStream.getNextEntry()) != null) {

            String entryName = archiveEntry.getName();
            if (archiveEntry.isDirectory()) {
                entryName += "/";
            }

            if (log.isInfoEnabled()) {
                log.info("");
                String info = "### " + entryName;
                long size = archiveEntry.getSize();
                info += " (~" + Number.asHumanApproximate(size) + " or " + size + " bytes)";
                log.info(info);
            }

            // TODO: Triggers for "/" will have to be processed manually here!

            MultiDigestInputStream entryInputStream = null;
            try {
                PackageEntry structureEntry = new PackageEntry(archiveEntry);
                entryInputStream = new MultiDigestInputStream(archiveInputStream); // As it happens to be!

                // Directories are not processed per se
                Iterator<Action> ait = actions.iterator();
                while (ait.hasNext()) {
                    Action action = ait.next();

                    if (action.match(structureEntry.getName())) {
                        if (log.isDebugEnabled()) {
                            log.debug(me() + ":process container");
                        }
                        Processor processor = action.getProcessor();
                        if (processor instanceof ContainerStructureProcessor) {
                            if (action.getMethod().equalsIgnoreCase("process")) {
                                //-----------------------------------------------------------------------------
                                // Since we are referring to a structure (processor), we are probably just
                                // going to process an embedded TAR-file (or the like). We create a
                                // temporary file and recursively feed it to the processor manager...
                                //-----------------------------------------------------------------------------
                                File subInputFile = extractEntry(structureEntry, entryInputStream);
                                File subOutputFile = null;
                                if (isMutableCall) {
                                    subOutputFile = File.createTempFile("temporary-processed", ".package");
                                }
                                try {
                                    InputStream subInputStream = null;
                                    OutputStream subOutputStream = null;
                                    try {
                                        subInputStream = new BufferedInputStream(
                                                new FileInputStream(subInputFile));
                                        if (isMutableCall) {
                                            subOutputStream = new BufferedOutputStream(
                                                    new FileOutputStream(subOutputFile));
                                        }

                                        // Run it through the processor manager which knows what to do with it
                                        manager.applyOnContainerWithStructure(action.getProcessor(),
                                                action.getMethod(), structureEntry.getName(), subInputStream,
                                                subOutputStream, basicContext);
                                    } finally {
                                        if (null != subInputStream)
                                            subInputStream.close();
                                        if (null != subOutputStream)
                                            subOutputStream.close();
                                    }

                                    if (isMutableCall) {
                                        // Add the temporary file to the output stream instead of the original
                                        addEntry(subOutputFile, structureEntry, archiveOutputStream);
                                    }
                                } finally {
                                    if (null != subInputFile && subInputFile.exists())
                                        subInputFile.delete();
                                    if (null != subOutputFile && subOutputFile.exists())
                                        subOutputFile.delete();
                                }
                                continue with_next_entry; // since we operated on a unique entry

                            } else {
                                // Unknown operation on a container file
                                throw new ProcessorException(
                                        "Unknown action on container: " + action.getMethod());
                            }
                        } else if (processor instanceof FileProcessor) {
                            //---------------------------------------------------------------------------------
                            // Since we are referring to a file processor, we will just pass the entry with its
                            // input stream back to the processor manager that will know what to do with it.
                            //---------------------------------------------------------------------------------
                            manager.applyOnEntry(action.getProcessor(), action.getMethod(), structureEntry,
                                    entryInputStream, archiveOutputStream, basicContext);
                            continue with_next_entry; // since we operated on a unique entry
                        }
                    }
                }

                if (isMutableCall && !addedEntries.contains(structureEntry.getName())) {
                    // We may safely copy file
                    copyEntry(structureEntry, entryInputStream, archiveOutputStream);
                }
            } finally {
                /*
                 * Don't close the entryInputStream! It is just a reference to the archiveInputStream
                 * which we want to continue operating upon.
                 */

                if (!archiveEntry.isDirectory()) {
                    // Collect bitstream information - this is where we associate _actual_ values,
                    // i.e. calculated checksums and calculated byte lengths.
                    Map<String, String> bitstreamInfo = new HashMap<String, String>();

                    // OBSERVE: The following might not be completely valid in all circumstances,
                    // as InputStream.getSize() only returns the number of bytes that you can read
                    // and not necessarily the number of bytes in the stream. But in this case,
                    // I believe it to be valid...
                    if (entryInputStream.getSize() > 0) {
                        bitstreamInfo.put("size", "" + entryInputStream.getSize());

                        Map<String, byte[]> digests = entryInputStream.getDigests();
                        for (String key : digests.keySet()) {
                            byte[] digest = digests.get(key);

                            if (digest.length == 8) {
                                ByteBuffer buf = ByteBuffer.wrap(digest);
                                String value = "" + buf.getLong();
                                bitstreamInfo.put(key, value);
                            } else {
                                StringBuffer hexString = new StringBuffer();
                                for (int i = 0; i < digest.length; i++) {
                                    hexString.append(Integer.toHexString(0xFF & digest[i]));
                                }
                                String value = hexString.toString();
                                bitstreamInfo.put(key, value);
                            }
                        }

                        // Create a package-relative path...
                        File top = new File("/");
                        File contentStream = top; // starting point relative to top

                        // ...and reassemble
                        int start = entryName.startsWith("/") ? 0 : 1; /* skip [example1]/content/... */

                        String[] parts = entryName.split("/");
                        for (int i = start; i < parts.length; i++) {
                            contentStream = new File(contentStream, parts[i]);
                        }
                        bitstreamInfo.put("fileName", parts[parts.length - 1]);

                        String path = contentStream.getPath().replace("\\", "/"); // in case we're on Windows
                        context.associate("CALCULATED", path, path, bitstreamInfo);
                    }
                }
            }
        }
    } finally {
        if (null != archiveOutputStream)
            archiveOutputStream.close();
        if (null != archiveInputStream)
            archiveInputStream.close();

        context.pop();
    }
}

From source file:org.cloudgraph.hbase.graph.DefaultAssembler.java

protected UUID findRootUUID(TableReader childTableReader, GraphColumnKeyFactory keyFactory, PlasmaType subType,
        CellValues childResult) {/*w  w  w.  j  a  va  2s .c o m*/

    byte[] uuidQual = keyFactory.createColumnKey(subType, EntityMetaKey.UUID);
    byte[] rootUuid = childResult
            .getColumnValue(childTableReader.getTableConfig().getDataColumnFamilyNameBytes(), uuidQual);
    if (rootUuid != null) {
        String uuidStr = null;
        uuidStr = new String(rootUuid, childTableReader.getTableConfig().getCharset());
        UUID uuid = null;
        if (uuidStr.length() == 22) {
            byte[] bytes = Base64.decodeBase64(uuidStr);
            ByteBuffer bb = ByteBuffer.wrap(bytes);
            uuid = new UUID(bb.getLong(), bb.getLong());
        } else {
            uuid = UUID.fromString(uuidStr);
        }
        return uuid;
    }
    return null;
}

From source file:org.cryptomator.crypto.aes256.Aes256Cryptor.java

@Override
public Long decryptedContentLength(SeekableByteChannel encryptedFile) throws IOException {
    // skip 128-bit IV + 256-bit MAC (48 bytes):
    encryptedFile.position(48);

    // read encrypted value:
    final ByteBuffer encryptedFileSizeBuffer = ByteBuffer.allocate(AES_BLOCK_LENGTH);
    final int numFileSizeBytesRead = encryptedFile.read(encryptedFileSizeBuffer);

    // return "unknown" value, if EOF
    if (numFileSizeBytesRead != encryptedFileSizeBuffer.capacity()) {
        return null;
    }

    // decrypt size:
    try {
        final Cipher sizeCipher = aesEcbCipher(primaryMasterKey, Cipher.DECRYPT_MODE);
        final byte[] decryptedFileSize = sizeCipher.doFinal(encryptedFileSizeBuffer.array());
        final ByteBuffer fileSizeBuffer = ByteBuffer.wrap(decryptedFileSize);
        return fileSizeBuffer.getLong();
    } catch (IllegalBlockSizeException | BadPaddingException e) {
        throw new IllegalStateException(e);
    }
}

From source file:org.hobbit.core.components.AbstractEvaluationStorage.java

@Override
public void init() throws Exception {
    super.init();

    String queueName = EnvVariables.getString(Constants.TASK_GEN_2_EVAL_STORAGE_QUEUE_NAME_KEY,
            Constants.TASK_GEN_2_EVAL_STORAGE_DEFAULT_QUEUE_NAME);
    taskResultReceiver = DataReceiverImpl.builder().maxParallelProcessedMsgs(maxParallelProcessedMsgs)
            .queue(incomingDataQueueFactory, generateSessionQueueName(queueName))
            .dataHandler(new DataHandler() {
                @Override
                public void handleData(byte[] data) {
                    ByteBuffer buffer = ByteBuffer.wrap(data);
                    String taskId = RabbitMQUtils.readString(buffer);
                    LOGGER.trace("Received from task generator {}.", taskId);
                    byte[] taskData = RabbitMQUtils.readByteArray(buffer);
                    long timestamp = buffer.getLong();
                    receiveExpectedResponseData(taskId, timestamp, taskData);
                }
            }).build();

    queueName = EnvVariables.getString(Constants.SYSTEM_2_EVAL_STORAGE_QUEUE_NAME_KEY,
            Constants.SYSTEM_2_EVAL_STORAGE_DEFAULT_QUEUE_NAME);
    final boolean receiveTimeStamp = EnvVariables.getBoolean(RECEIVE_TIMESTAMP_FOR_SYSTEM_RESULTS_KEY, false,
            LOGGER);
    final String ackExchangeName = generateSessionQueueName(Constants.HOBBIT_ACK_EXCHANGE_NAME);
    systemResultReceiver = DataReceiverImpl.builder().maxParallelProcessedMsgs(maxParallelProcessedMsgs)
            .queue(incomingDataQueueFactory, generateSessionQueueName(queueName))
            .dataHandler(new DataHandler() {
                @Override
                public void handleData(byte[] data) {
                    ByteBuffer buffer = ByteBuffer.wrap(data);
                    String taskId = RabbitMQUtils.readString(buffer);
                    LOGGER.trace("Received from system {}.", taskId);
                    byte[] responseData = RabbitMQUtils.readByteArray(buffer);
                    long timestamp = receiveTimeStamp ? buffer.getLong() : System.currentTimeMillis();
                    receiveResponseData(taskId, timestamp, responseData);
                    // If we should send acknowledgments (and there was no
                    // error until now)
                    if (ackChannel != null) {
                        try {
                            ackChannel.basicPublish(ackExchangeName, "", null,
                                    RabbitMQUtils.writeString(taskId));
                        } catch (IOException e) {
                            LOGGER.error("Error while sending acknowledgement.", e);
                        }
                        LOGGER.trace("Sent ack {}.", taskId);
                    }
                }
            }).build();

    queueName = EnvVariables.getString(Constants.EVAL_MODULE_2_EVAL_STORAGE_QUEUE_NAME_KEY,
            Constants.EVAL_MODULE_2_EVAL_STORAGE_DEFAULT_QUEUE_NAME);
    evalModule2EvalStoreQueue = getFactoryForIncomingDataQueues()
            .createDefaultRabbitQueue(generateSessionQueueName(queueName));
    evalModule2EvalStoreQueue.channel.basicConsume(evalModule2EvalStoreQueue.name, true,
            new DefaultConsumer(evalModule2EvalStoreQueue.channel) {
                @Override
                public void handleDelivery(String consumerTag, Envelope envelope, BasicProperties properties,
                        byte[] body) throws IOException {
                    byte response[] = null;
                    // get iterator id
                    ByteBuffer buffer = ByteBuffer.wrap(body);
                    if (buffer.remaining() < 1) {
                        response = EMPTY_RESPONSE;
                        LOGGER.error("Got a request without a valid iterator Id. Returning emtpy response.");
                    } else {
                        byte iteratorId = buffer.get();

                        // get the iterator
                        Iterator<? extends ResultPair> iterator = null;
                        if (iteratorId == NEW_ITERATOR_ID) {
                            // create and save a new iterator
                            iteratorId = (byte) resultPairIterators.size();
                            LOGGER.info("Creating new iterator #{}", iteratorId);
                            resultPairIterators.add(iterator = createIterator());
                        } else if ((iteratorId < 0) || iteratorId >= resultPairIterators.size()) {
                            response = EMPTY_RESPONSE;
                            LOGGER.error("Got a request without a valid iterator Id ("
                                    + Byte.toString(iteratorId) + "). Returning empty response.");
                        } else {
                            iterator = resultPairIterators.get(iteratorId);
                        }
                        if ((iterator != null) && (iterator.hasNext())) {
                            ResultPair resultPair = iterator.next();
                            Result result = resultPair.getExpected();
                            byte expectedResultData[], expectedResultTimeStamp[], actualResultData[],
                                    actualResultTimeStamp[];
                            // Make sure that the result is not null
                            if (result != null) {
                                // Check whether the data array is null
                                expectedResultData = result.getData() != null ? result.getData() : new byte[0];
                                expectedResultTimeStamp = RabbitMQUtils.writeLong(result.getSentTimestamp());
                            } else {
                                expectedResultData = new byte[0];
                                expectedResultTimeStamp = RabbitMQUtils.writeLong(0);
                            }
                            result = resultPair.getActual();
                            // Make sure that the result is not null
                            if (result != null) {
                                // Check whether the data array is null
                                actualResultData = result.getData() != null ? result.getData() : new byte[0];
                                actualResultTimeStamp = RabbitMQUtils.writeLong(result.getSentTimestamp());
                            } else {
                                actualResultData = new byte[0];
                                actualResultTimeStamp = RabbitMQUtils.writeLong(0);
                            }

                            response = RabbitMQUtils.writeByteArrays(
                                    new byte[] { iteratorId }, new byte[][] { expectedResultTimeStamp,
                                            expectedResultData, actualResultTimeStamp, actualResultData },
                                    null);
                        } else {
                            response = new byte[] { iteratorId };
                        }
                    }
                    getChannel().basicPublish("", properties.getReplyTo(), null, response);
                }
            });

    boolean sendAcks = EnvVariables.getBoolean(Constants.ACKNOWLEDGEMENT_FLAG_KEY, false, LOGGER);
    if (sendAcks) {
        // Create channel for acknowledgements
        ackChannel = getFactoryForOutgoingCmdQueues().getConnection().createChannel();
        ackChannel.exchangeDeclare(generateSessionQueueName(Constants.HOBBIT_ACK_EXCHANGE_NAME), "fanout",
                false, true, null);
    }
}

From source file:com.healthmarketscience.jackcess.impl.OleUtil.java

/**
 * creates the appropriate ContentImpl for the given blob.
 */
private static ContentImpl parseContent(OleBlobImpl blob) throws IOException {
    ByteBuffer bb = PageChannel.wrap(blob.getBytes());

    if ((bb.remaining() < 2) || (bb.getShort() != PACKAGE_SIGNATURE)) {
        return new UnknownContentImpl(blob);
    }

    // read outer package header
    int headerSize = bb.getShort();
    int objType = bb.getInt();
    int prettyNameLen = bb.getShort();
    int classNameLen = bb.getShort();
    int prettyNameOff = bb.getShort();
    int classNameOff = bb.getShort();
    int objSize = bb.getInt();
    String prettyName = readStr(bb, prettyNameOff, prettyNameLen);
    String className = readStr(bb, classNameOff, classNameLen);
    bb.position(headerSize);

    // read ole header
    int oleVer = bb.getInt();
    int format = bb.getInt();

    if (oleVer != OLE_VERSION) {
        return new UnknownContentImpl(blob);
    }

    int typeNameLen = bb.getInt();
    String typeName = readStr(bb, bb.position(), typeNameLen);
    bb.getLong(); // unused
    int dataBlockLen = bb.getInt();
    int dataBlockPos = bb.position();

    if (SIMPLE_PACKAGE_TYPE.equalsIgnoreCase(typeName)) {
        return createSimplePackageContent(blob, prettyName, className, typeName, bb, dataBlockLen);
    }

    // if COMPOUND_FACTORY is null, the poi library isn't available, so just
    // load compound data as "other"
    if ((COMPOUND_FACTORY != null) && (bb.remaining() >= COMPOUND_STORAGE_SIGNATURE.length)
            && ByteUtil.matchesRange(bb, bb.position(), COMPOUND_STORAGE_SIGNATURE)) {
        return COMPOUND_FACTORY.createCompoundPackageContent(blob, prettyName, className, typeName, bb,
                dataBlockLen);
    }

    // this is either some other "special" (as yet unhandled) format, or it is
    // simply an embedded file (or it is compound data and poi isn't available)
    return new OtherContentImpl(blob, prettyName, className, typeName, dataBlockPos, dataBlockLen);
}

From source file:voldemort.store.cachestore.impl.ChannelStore.java

/**
 * Get the key offset and length from the index channel.
 * @param record the record index
 * @return key Object
 */
public Key readKey(int record) throws IOException {
    ByteBuffer buf = ByteBuffer.allocate(RECORD_SIZE);
    indexChannel.read(buf, (long) record * RECORD_SIZE + OFFSET);
    buf.rewind();
    byte status = buf.get();
    if (isDeleted(status))
        return null;
    else {
        long key = buf.getLong();
        byte[] keys = readChannel(key, keyChannel);
        return toKey(keys);
    }
}

From source file:ezbake.security.service.registration.handler.EzSecurityRegistrationHandler.java

/**
 * Generate a security ID.
 * @return a random security ID
 */
protected String generateSecurtyId() {
    byte[] bytes = new byte[64];
    new SecureRandom().nextBytes(bytes);
    ByteBuffer buff = ByteBuffer.wrap(bytes);
    return String.valueOf(buff.getLong() & 0x7fffffffffffffffL);
}