Example usage for java.nio ByteBuffer allocateDirect

Introduction

On this page you can find usage examples for java.nio.ByteBuffer.allocateDirect.

Prototype

public static ByteBuffer allocateDirect(int capacity) 

Document

Creates a direct byte buffer based on a newly allocated memory block.
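A minimal sketch of the typical allocate/write/flip/read cycle (the class name and values are illustrative):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class AllocateDirectExample {
    public static void main(String[] args) {
        // Allocate 1 KiB outside the Java heap; direct buffers let the JVM
        // hand memory to native I/O (channels, JNI) without an extra copy.
        ByteBuffer buf = ByteBuffer.allocateDirect(1024).order(ByteOrder.nativeOrder());

        buf.putInt(42);                   // writing advances the position
        buf.flip();                       // limit = position, position = 0
        System.out.println(buf.getInt()); // reads back 42
    }
}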

Usage

From source file:org.apache.hadoop.io.compress.zlib.ZlibCompressor.java

/** 
 * Creates a new compressor using the specified compression level.
 * Compressed data will be generated in ZLIB format.
 * @param level Compression level {@link CompressionLevel}
 * @param strategy Compression strategy {@link CompressionStrategy}
 * @param header Compression header {@link CompressionHeader}
 * @param directBufferSize Size of the direct buffer to be used.
 */
public ZlibCompressor(CompressionLevel level, CompressionStrategy strategy, CompressionHeader header,
        int directBufferSize) {
    this.level = level;
    this.strategy = strategy;
    this.windowBits = header;
    stream = init(this.level.compressionLevel(), this.strategy.compressionStrategy(),
            this.windowBits.windowBits());

    this.directBufferSize = directBufferSize;
    uncompressedDirectBuf = ByteBuffer.allocateDirect(directBufferSize);
    compressedDirectBuf = ByteBuffer.allocateDirect(directBufferSize);
    compressedDirectBuf.position(directBufferSize);
}
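Note the trailing compressedDirectBuf.position(directBufferSize) call: it leaves the compressed buffer with zero bytes remaining, so the compressor starts out reporting no buffered output, and the first request for compressed data triggers a refill from the native zlib stream.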

From source file:org.jtrfp.trcl.core.ResourceManager.java

public ResourceManager(final TR tr) {
    this.tr = tr;
    try {
        Class.forName("de.quippy.javamod.multimedia.mod.loader.tracker.ProTrackerMod");
        Class.forName("de.quippy.javamod.multimedia.mod.ModContainer"); // ModContainer uses the ModFactory!!
    } catch (Exception e) {
        tr.showStopper(e);
    }
    gpuResidentMODs = new CachedObjectFactory<String, GPUResidentMOD>() {
        @Override
        protected GPUResidentMOD generate(String key) {
            return new GPUResidentMOD(tr, getMOD(key));
        }//end generate(...)
    };
    soundTextures = new CachedObjectFactory<String, SoundTexture>() {
        @Override
        protected SoundTexture generate(String key) {
            try {
                final AudioInputStream ais = AudioSystem
                        .getAudioInputStream(getInputStreamFromResource("SOUND\\" + key));
                final FloatBuffer fb = ByteBuffer.allocateDirect((int) ais.getFrameLength() * 4)
                        .order(ByteOrder.nativeOrder()).asFloatBuffer();
                int value;
                while ((value = ais.read()) != -1) {
                    fb.put(((float) (value - 128)) / 128f);
                }
                fb.clear();
                return tr.soundSystem.get().newSoundTexture(fb, (int) ais.getFormat().getFrameRate());
            } catch (Exception e) {
                tr.showStopper(e);
                return null;
            }
        }
    };

    setupPODListeners();
}
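In this loader the direct buffer is sized at frameLength * 4 because each float occupies four bytes, and every unsigned 8-bit sample read from the stream (0-255) is recentred and scaled into [-1, 1) via (value - 128) / 128f. The final clear() rewinds the FloatBuffer so the sound system consumes it from the start.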

From source file:org.apache.mnemonic.Utils.java

/**
 * Resize a ByteBuffer by allocating a new instance.
 *
 * @param buf
 *          the buffer to resize
 *
 * @param size
 *          the new capacity in bytes
 *
 * @return the resized ByteBuffer instance
 */
public static ByteBuffer resizeByteBuffer(ByteBuffer buf, long size) {
    ByteBuffer ret = ByteBuffer.allocateDirect((int) size);
    if (ret != null) {
        if (null != buf) {
            ret.put(buf);
            ret.flip();
        }
    }
    return ret;
}
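Two details worth noting: ByteBuffer.allocateDirect never returns null (it throws OutOfMemoryError when native memory is exhausted), so the outer null check is purely defensive; and put(buf) copies from the source's current position up to its limit, throwing BufferOverflowException if more bytes remain than the new capacity can hold, so shrinking a buffer requires bounding the source first. The flip() then readies the copy for reading.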

From source file:de.fhg.fokus.diameter.DiameterPeer.transport.Communicator.java

public void run() {
    MessageInfo messageInfo = null;
    ByteBuffer receiveByteBuffer = ByteBuffer.allocateDirect(MAX_MESSAGE_LENGTH);
    DiameterMessage msg = null;
    byte[] buffer = null;
    int len = 0;
    //handler to keep track of association setup and termination
    AssociationHandler assocHandler = new AssociationHandler();
    try {
        while (this.running) {
            messageInfo = sctpChannel.receive(receiveByteBuffer, System.out, assocHandler);
            log.debug("Received msg from communicator:" + this + " and sctpChannel:" + sctpChannel);
            log.debug("Received msg's length:" + messageInfo.bytes());
            log.error("Received msg's length:" + messageInfo.bytes());
            receiveByteBuffer.flip();

            if (receiveByteBuffer.remaining() > 0) {
                buffer = new byte[messageInfo.bytes()];
                receiveByteBuffer.get(buffer);
                receiveByteBuffer.clear();
                // log.debug("The origin message stream  is:\n" + CommonMethod.byteToHex(buffer));
                //first we check the version
                if (buffer[0] != 1) {
                    log.error("Expecting diameter version 1, received version " + buffer[0]);
                    continue;
                }
                //then we check the length of the message
                len = ((int) buffer[1] & 0xFF) << 16 | ((int) buffer[2] & 0xFF) << 8 | ((int) buffer[3] & 0xFF);
                if (len > MAX_MESSAGE_LENGTH) {
                    log.error("Message too long (msg length:" + len + " > max buffer length:"
                            + MAX_MESSAGE_LENGTH + ").");
                    continue;
                }
                //now we can decode the message
                try {
                    msg = Codec.decodeDiameterMessage(buffer, 0);
                } catch (DiameterMessageDecodeException e) {
                    log.error("Error decoding diameter message !");
                    log.error(e, e);
                    msg = null;
                    continue;
                }
                msg.networkTime = System.currentTimeMillis();
                log.debug("Received message is:\n" + msg);
                if (this.peer != null) {
                    this.peer.refreshTimer();
                }
                processMessage(msg);
            }
            msg = null;
        }
    } catch (Exception e1) {
        log.error("Exception:" + e1.getCause() + " catched in communicator:" + this + " and running flag="
                + running);
        if (this.running) {
            if (this.peer != null) {
                if (this.peer.I_comm == this) {
                    StateMachine.process(this.peer, StateMachine.I_Peer_Disc);
                }
                if (this.peer.R_comm == this) {
                    log.error("Now closing the peer:" + this.peer);
                    StateMachine.process(this.peer, StateMachine.R_Peer_Disc);
                }
            }
            log.error("Error reading from sctpChannel:" + sctpChannel + ", the channel might be colsed.");

        } /* else it was a shutdown request, it's normal */
    }
    log.debug("Now closing communicator:" + this + ", and it's sctpChannel:" + sctpChannel);
    this.running = false;
    try {
        sctpChannel.close();
    } catch (IOException e) {
        log.error("Error closing sctpChannel !");
        log.error(e, e);
    }
}
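The decoding relies on the Diameter header layout: the three octets after the version byte carry a 24-bit, big-endian message length, which the shift-and-mask expression reassembles before the message is handed to the codec. The flip()/clear() pair around each receive is the standard fill-drain cycle for a reused direct buffer.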

From source file:InterleavedNIOBuffer.java

void createInterleavedBuffers() {
    int size;
    ByteOrder order = ByteOrder.nativeOrder();

    size = (2 + 2 + 3 + 3) * 3 * 4;
    FloatBuffer vertex = ByteBuffer.allocateDirect(size * 4).order(order).asFloatBuffer();
    vertex.put(interleaved, 0, size);
    interleavedBuffer = new J3DBuffer(vertex);

    size = (2 + 2 + 3 + 3) * 4;
    FloatBuffer indexedVertex = ByteBuffer.allocateDirect(size * 4).order(order).asFloatBuffer();
    indexedVertex.put(indexedInterleaved, 0, size);
    indexedInterleavedBuffer = new J3DBuffer(indexedVertex);
}
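Here size counts floats (the (2 + 2 + 3 + 3) components per vertex presumably being two 2-component texture coordinate sets, a 3-component normal, and a 3-component position), while allocateDirect takes a capacity in bytes, hence the size * 4 multiplier. Native byte order matters because the buffer is consumed directly by the rendering layer rather than by Java code.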

From source file:com.linkedin.pinot.perf.ForwardIndexReaderBenchmark.java

public static void multiValuedReadBenchMarkV1(File file, int numDocs, int totalNumValues, int maxEntriesPerDoc,
        int columnSizeInBits) throws Exception {
    System.out.println("******************************************************************");
    System.out.println("Analyzing " + file.getName() + " numDocs:" + numDocs + ", totalNumValues:"
            + totalNumValues + ", maxEntriesPerDoc:" + maxEntriesPerDoc + ", numBits:" + columnSizeInBits);
    long start, end;
    boolean readFile = true;
    boolean randomRead = true;
    boolean contextualRead = true;
    boolean signed = false;
    boolean isMmap = false;
    PinotDataBuffer heapBuffer = PinotDataBuffer.fromFile(file, ReadMode.mmap, FileChannel.MapMode.READ_ONLY,
            "benchmarking");
    BaseSingleColumnMultiValueReader reader = new com.linkedin.pinot.core.io.reader.impl.v1.FixedBitMultiValueReader(
            heapBuffer, numDocs, totalNumValues, columnSizeInBits, signed);

    int[] intArray = new int[maxEntriesPerDoc];
    File outfile = new File("/tmp/" + file.getName() + ".raw");
    FileWriter fw = new FileWriter(outfile);
    for (int i = 0; i < numDocs; i++) {
        int length = reader.getIntArray(i, intArray);
        StringBuilder sb = new StringBuilder();
        String delim = "";
        for (int j = 0; j < length; j++) {
            sb.append(delim);
            sb.append(intArray[j]);
            delim = ",";
        }
        fw.write(sb.toString());
        fw.write("\n");
    }
    fw.close();

    // sequential read
    if (readFile) {
        DescriptiveStatistics stats = new DescriptiveStatistics();
        RandomAccessFile raf = new RandomAccessFile(file, "rw");
        ByteBuffer buffer = ByteBuffer.allocateDirect((int) file.length());
        raf.getChannel().read(buffer);
        for (int run = 0; run < MAX_RUNS; run++) {
            long length = file.length();
            start = System.currentTimeMillis();
            for (int i = 0; i < length; i++) {
                byte b = buffer.get(i);
            }
            end = System.currentTimeMillis();
            stats.addValue((end - start));
        }
        System.out.println("v1 multi value read bytes stats for " + file.getName());
        System.out.println(
                stats.toString().replaceAll("\n", ", ") + " raw:" + Arrays.toString(stats.getValues()));

        raf.close();
    }
    if (randomRead) {
        DescriptiveStatistics stats = new DescriptiveStatistics();
        for (int run = 0; run < MAX_RUNS; run++) {
            start = System.currentTimeMillis();
            for (int i = 0; i < numDocs; i++) {
                int length = reader.getIntArray(i, intArray);
            }
            end = System.currentTimeMillis();
            stats.addValue((end - start));
        }
        System.out.println("v1 multi value sequential read one stats for " + file.getName());
        System.out.println(
                stats.toString().replaceAll("\n", ", ") + " raw:" + Arrays.toString(stats.getValues()));
    }

    if (contextualRead) {
        DescriptiveStatistics stats = new DescriptiveStatistics();
        for (int run = 0; run < MAX_RUNS; run++) {
            MultiValueReaderContext context = (MultiValueReaderContext) reader.createContext();
            start = System.currentTimeMillis();
            for (int i = 0; i < numDocs; i++) {
                int length = reader.getIntArray(i, intArray, context);
            }
            end = System.currentTimeMillis();
            // System.out.println("RUN:" + run + "Time:" + (end-start));
            stats.addValue((end - start));
        }
        System.out.println("v1 multi value sequential read one with context stats for " + file.getName());
        System.out.println(
                stats.toString().replaceAll("\n", ", ") + " raw:" + Arrays.toString(stats.getValues()));

    }
    reader.close();
    heapBuffer.close();
    System.out.println("******************************************************************");

}
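The sequential pass loads the whole file into a direct buffer with a single channel read and then times absolute buffer.get(i) calls, which never move the buffer's position; the random and contextual passes go through the reader API instead, the latter reusing a MultiValueReaderContext so the reader can carry state between consecutive lookups.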

From source file:de.metalcon.imageServer.protocol.CreateRequestTest.java

/**
 * Compare two input streams.
 * 
 * @param stream1
 *            first input stream
 * @param stream2
 *            second input stream
 * @return true - if the two streams contain the same content<br>
 *         false - otherwise
 * @throws IOException
 *             if IO errors occurred
 */
private static boolean compareInputStreams(final InputStream stream1, final InputStream stream2)
        throws IOException {
    final ReadableByteChannel channel1 = Channels.newChannel(stream1);
    final ReadableByteChannel channel2 = Channels.newChannel(stream2);
    final ByteBuffer buffer1 = ByteBuffer.allocateDirect(4096);
    final ByteBuffer buffer2 = ByteBuffer.allocateDirect(4096);

    try {
        while (true) {

            int n1 = channel1.read(buffer1);
            int n2 = channel2.read(buffer2);

            if ((n1 == -1) || (n2 == -1)) {
                return n1 == n2;
            }

            buffer1.flip();
            buffer2.flip();

            for (int i = 0; i < Math.min(n1, n2); i++) {
                if (buffer1.get() != buffer2.get()) {
                    return false;
                }
            }

            buffer1.compact();
            buffer2.compact();
        }

    } finally {
        if (stream1 != null) {
            stream1.close();
        }
        if (stream2 != null) {
            stream2.close();
        }
    }
}
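The flip()/compact() cycle is what makes this comparison robust when the two channels return chunks of different sizes: compact() carries any unmatched bytes forward to the next iteration instead of discarding them, so bytes are only ever compared at the same logical offset in each stream.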

From source file:hivemall.topicmodel.ProbabilisticTopicModelBaseUDTF.java

protected void recordTrainSampleToTempFile(@Nonnull final String[] wordCounts) throws HiveException {
    if (iterations == 1) {
        return;
    }

    ByteBuffer buf = inputBuf;
    NioStatefulSegment dst = fileIO;

    if (buf == null) {
        final File file;
        try {
            file = File.createTempFile("hivemall_topicmodel", ".sgmt");
            file.deleteOnExit();
            if (!file.canWrite()) {
                throw new UDFArgumentException("Cannot write a temporary file: " + file.getAbsolutePath());
            }
            logger.info("Record training samples to a file: " + file.getAbsolutePath());
        } catch (IOException ioe) {
            throw new UDFArgumentException(ioe);
        } catch (Throwable e) {
            throw new UDFArgumentException(e);
        }
        this.inputBuf = buf = ByteBuffer.allocateDirect(1024 * 1024); // 1 MB
        this.fileIO = dst = new NioStatefulSegment(file, false);
    }

    // wordCounts length, wc1 length, wc1 string, wc2 length, wc2 string, ...
    int wcLengthTotal = 0;
    for (String wc : wordCounts) {
        if (wc == null) {
            continue;
        }
        wcLengthTotal += wc.length();
    }
    int recordBytes = SizeOf.INT + SizeOf.INT * wordCounts.length + wcLengthTotal * SizeOf.CHAR;
    int requiredBytes = SizeOf.INT + recordBytes; // need to allocate space for "recordBytes" itself

    int remain = buf.remaining();
    if (remain < requiredBytes) {
        writeBuffer(buf, dst);
    }

    buf.putInt(recordBytes);
    buf.putInt(wordCounts.length);
    for (String wc : wordCounts) {
        NIOUtils.putString(wc, buf);
    }
}
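The on-disk format is length-prefixed: recordBytes counts one int for the element count, one int per string length, and SizeOf.CHAR (2) bytes per character, while requiredBytes adds one more SizeOf.INT for the recordBytes prefix itself. When the 1 MB direct buffer cannot hold the next record, writeBuffer flushes it to the file segment before the record is appended.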

From source file:com.intel.chimera.stream.AbstractCryptoStreamTest.java

private void doByteBufferWrite(String cipherClass, ByteArrayOutputStream baos, boolean withChannel)
        throws Exception {
    baos.reset();
    CryptoOutputStream out = getCryptoOutputStream(baos, getCipher(cipherClass), defaultBufferSize, iv,
            withChannel);
    ByteBuffer buf = ByteBuffer.allocateDirect(dataLen / 2);
    buf.put(data, 0, dataLen / 2);
    buf.flip();
    int n1 = out.write(buf);

    buf.clear();
    buf.put(data, n1, dataLen / 3);
    buf.flip();
    int n2 = out.write(buf);

    buf.clear();
    buf.put(data, n1 + n2, dataLen - n1 - n2);
    buf.flip();
    int n3 = out.write(buf);

    Assert.assertEquals(dataLen, n1 + n2 + n3);

    out.flush();

    InputStream in = getCryptoInputStream(new ByteArrayInputStream(encData), getCipher(cipherClass),
            defaultBufferSize, iv, withChannel);
    buf = ByteBuffer.allocate(dataLen + 100);
    byteBufferReadCheck(in, buf, 0);
    in.close();
}
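The test drains one direct buffer three times through the clear()/put()/flip() cycle, deliberately splitting the payload into uneven chunks (half, a third, then the remainder) to exercise partial ByteBuffer writes; the assertion confirms the stream consumed exactly dataLen bytes before the readback check.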

From source file:org.gephi.io.importer.api.ImportUtils.java

/**
 * Uncompress a Bzip2 file.
 */
public static File getBzipFile(FileObject in, File out, boolean isTar) throws IOException {

    // Stream buffer
    final int BUFF_SIZE = 8192;
    final byte[] buffer = new byte[BUFF_SIZE];

    BZip2CompressorInputStream inputStream = null;
    FileOutputStream outStream = null;

    try {
        FileInputStream is = new FileInputStream(in.getPath());
        inputStream = new BZip2CompressorInputStream(is);
        outStream = new FileOutputStream(out.getAbsolutePath());

        if (isTar) {
            // Read Tar header
            int remainingBytes = readTarHeader(inputStream);

            // Read content
            ByteBuffer bb = ByteBuffer.allocateDirect(4 * BUFF_SIZE);
            byte[] tmpCache = new byte[BUFF_SIZE];
            int nRead, nGet;
            while ((nRead = inputStream.read(tmpCache)) != -1) {
                if (nRead == 0) {
                    continue;
                }
                bb.put(tmpCache);
                bb.position(0);
                bb.limit(nRead);
                while (bb.hasRemaining() && remainingBytes > 0) {
                    nGet = Math.min(bb.remaining(), BUFF_SIZE);
                    nGet = Math.min(nGet, remainingBytes);
                    bb.get(buffer, 0, nGet);
                    outStream.write(buffer, 0, nGet);
                    remainingBytes -= nGet;
                }
                bb.clear();
            }
        } else {
            int len;
            while ((len = inputStream.read(buffer)) > 0) {
                outStream.write(buffer, 0, len);
            }
        }
    } catch (IOException ex) {
        Exceptions.printStackTrace(ex);
    } finally {
        if (inputStream != null) {
            inputStream.close();
        }
        if (outStream != null) {
            outStream.close();
        }
    }

    return out;
}
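One quirk in the Tar branch: bb.put(tmpCache) always copies the full temporary array even when the read returned fewer bytes, and the following position(0)/limit(nRead) calls then mask the stale tail; bb.put(tmpCache, 0, nRead) would express the intent more directly.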