Example usage for java.nio ByteBuffer flip

Introduction

This page collects usage examples for java.nio.ByteBuffer.flip().

Prototype

public final Buffer flip() 

Document

Flips this buffer. The limit is set to the current position and then the position is set to zero; if the mark is defined, it is discarded.
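
A minimal, self-contained sketch of the write-then-flip-then-read cycle (the class and variable names are illustrative):

import java.nio.ByteBuffer;

public class FlipDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16);
        buf.put((byte) 1).put((byte) 2).put((byte) 3); // position = 3, limit = 16
        buf.flip();                                    // position = 0, limit = 3
        while (buf.hasRemaining()) {
            System.out.println(buf.get());             // prints 1, 2, 3
        }
    }
}

Without the flip, get() would start reading at position 3 against a limit of 16 and return whatever undefined bytes follow the written data.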

Usage

From source file:morphy.service.SocketConnectionService.java

protected synchronized String readMessage(SocketChannel channel) {
    try {
        ByteBuffer buffer = ByteBuffer.allocate(maxCommunicationSizeBytes);
        int charsRead = -1;
        try {
            charsRead = channel.read(buffer);
        } catch (IOException cce) {
            if (channel.isOpen()) {
                channel.close();
                if (LOG.isInfoEnabled()) {
                    LOG.info("Closed channel " + channel);
                }
            }
        }
        if (charsRead == -1) {
            return null;
        } else if (charsRead > 0) {
            buffer.flip();

            Charset charset = Charset.forName(Morphy.getInstance().getMorphyPreferences()
                    .getString(PreferenceKeys.SocketConnectionServiceCharEncoding));

            SocketChannelUserSession socketChannelUserSession = socketToSession.get(channel.socket());

            byte[] bytes = buffer.array();
            buffer.position(0);

            System.out.println("IN: " + new String(bytes).trim());
            if (looksLikeTimesealInit(bytes)) {
                if (socketChannelUserSession.usingTimeseal == false) {
                    // First time?
                    socketChannelUserSession.usingTimeseal = true;
                    return MSG_TIMESEAL_OK;
                }
            }

            if (socketChannelUserSession.usingTimeseal) {
                /*
                 * Clients may pass multiple Timeseal-encoded messages at once.
                 * We need to pass each separate message to the Timeseal decoder
                 * as necessary.
                 */

                byte[] bytesToDecode = Arrays.copyOfRange(bytes, 0, charsRead - 1 /* \n or 10 */);
                byte[][] splitBytes = TimesealCoder.splitBytes(bytesToDecode, (byte) 10);

                buffer = ByteBuffer.allocate(bytesToDecode.length);
                buffer.position(0);
                for (int i = 0; i < splitBytes.length; i++) {
                    byte[] splitBytesToDecode = splitBytes[i];
                    TimesealParseResult parseResult = timesealCoder.decode(splitBytesToDecode);
                    if (parseResult != null) {
                        System.out.println(parseResult.getTimestamp());
                        parseResult.setMessage(parseResult.getMessage() + "\n");
                        System.out.println(parseResult.getMessage());

                        buffer.put(parseResult.getMessage().getBytes(charset));
                    }
                }
                //buffer.position(0);
                buffer.flip();
            }

            CharsetDecoder decoder = charset.newDecoder();
            CharBuffer charBuffer = decoder.decode(buffer);
            String message = charBuffer.toString();
            return message;
            //System.out.println(message);
            //return "";
        } else {
            return "";
        }
    } catch (Throwable t) {
        if (LOG.isErrorEnabled())
            LOG.error("Error reading SocketChannel " + channel.socket().getLocalAddress(), t);
        return null;
    }
}
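
Two details in this example hinge on the flip. buffer.array() returns the entire backing array regardless of position and limit, which is why the logged String has to be trimmed, whereas decoder.decode(buffer) honors them and decodes only the bytes between the position and the limit established by flip().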

From source file:org.apache.hc.client5.http.impl.auth.CredSspScheme.java

@Override
public String generateAuthResponse(final HttpHost host, final HttpRequest request, final HttpContext context)
        throws AuthenticationException {
    if (ntcredentials == null) {
        throw new AuthenticationException("NT credentials not available");
    }

    final String outputString;

    if (state == State.UNINITIATED) {
        beginTlsHandshake();
        outputString = wrapHandshake();
        state = State.TLS_HANDSHAKE;

    } else if (state == State.TLS_HANDSHAKE) {
        outputString = wrapHandshake();

    } else if (state == State.TLS_HANDSHAKE_FINISHED) {

        final int ntlmFlags = getNtlmFlags();
        final ByteBuffer buf = allocateOutBuffer();
        type1Message = new NTLMEngineImpl.Type1Message(ntcredentials.getNetbiosDomain(),
                ntcredentials.getWorkstation(), ntlmFlags);
        final byte[] ntlmNegoMessageEncoded = type1Message.getBytes();
        final CredSspTsRequest req = CredSspTsRequest.createNegoToken(ntlmNegoMessageEncoded);
        req.encode(buf);
        buf.flip();
        outputString = wrap(buf);
        state = State.NEGO_TOKEN_SENT;

    } else if (state == State.NEGO_TOKEN_RECEIVED) {
        final ByteBuffer buf = allocateOutBuffer();
        type2Message = new NTLMEngineImpl.Type2Message(lastReceivedTsRequest.getNegoToken());

        final Certificate peerServerCertificate = getPeerServerCertificate();

        type3Message = new NTLMEngineImpl.Type3Message(ntcredentials.getNetbiosDomain(),
                ntcredentials.getWorkstation(), ntcredentials.getUserName(), ntcredentials.getPassword(),
                type2Message.getChallenge(), type2Message.getFlags(), type2Message.getTarget(),
                type2Message.getTargetInfo(), peerServerCertificate, type1Message.getBytes(),
                type2Message.getBytes());

        final byte[] ntlmAuthenticateMessageEncoded = type3Message.getBytes();

        final byte[] exportedSessionKey = type3Message.getExportedSessionKey();

        ntlmOutgoingHandle = new NTLMEngineImpl.Handle(exportedSessionKey, NTLMEngineImpl.Mode.CLIENT, true);
        ntlmIncomingHandle = new NTLMEngineImpl.Handle(exportedSessionKey, NTLMEngineImpl.Mode.SERVER, true);

        final CredSspTsRequest req = CredSspTsRequest.createNegoToken(ntlmAuthenticateMessageEncoded);
        peerPublicKey = getSubjectPublicKeyDer(peerServerCertificate.getPublicKey());
        final byte[] pubKeyAuth = createPubKeyAuth();
        req.setPubKeyAuth(pubKeyAuth);

        req.encode(buf);
        buf.flip();
        outputString = wrap(buf);
        state = State.PUB_KEY_AUTH_SENT;

    } else if (state == State.PUB_KEY_AUTH_RECEIVED) {
        verifyPubKeyAuthResponse(lastReceivedTsRequest.getPubKeyAuth());
        final byte[] authInfo = createAuthInfo(ntcredentials);
        final CredSspTsRequest req = CredSspTsRequest.createAuthInfo(authInfo);

        final ByteBuffer buf = allocateOutBuffer();
        req.encode(buf);
        buf.flip();
        outputString = wrap(buf);
        state = State.CREDENTIALS_SENT;
    } else {
        throw new AuthenticationException("Wrong state " + state);
    }
    return outputString;
}

From source file:com.colorchen.qbase.utils.FileUtil.java

/**
 * Merges the given source files, in order, into a single output file.
 *
 * @param context the Android Context (unused in this method)
 * @param outFile the destination file
 * @param files   the source files to merge
 */
public static void mergeFiles(Context context, File outFile, List<File> files) {
    FileChannel outChannel = null;
    try {
        outChannel = new FileOutputStream(outFile).getChannel();
        for (File f : files) {
            FileChannel fc = new FileInputStream(f).getChannel();
            ByteBuffer bb = ByteBuffer.allocate(BUFSIZE);
            while (fc.read(bb) != -1) {
                bb.flip();
                outChannel.write(bb);
                bb.clear();
            }
            fc.close();
        }
        Log.d(TAG, "merged " + files.size() + " files into " + outFile.getName());
    } catch (IOException ioe) {
        ioe.printStackTrace();
    } finally {
        try {
            if (outChannel != null) {
                outChannel.close();
            }
        } catch (IOException ignore) {
        }
    }
}
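
One caveat worth noting: a single FileChannel.write(ByteBuffer) call is not guaranteed to drain the buffer. A defensive variant of the inner copy loop, as a sketch reusing the names fc, outChannel, and BUFSIZE from the method above, writes until the buffer is empty:

ByteBuffer bb = ByteBuffer.allocate(BUFSIZE);
while (fc.read(bb) != -1) {
    bb.flip();                  // switch from write mode to read mode
    while (bb.hasRemaining()) { // a single write() may be partial
        outChannel.write(bb);
    }
    bb.clear();                 // back to write mode for the next read
}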

From source file:hivemall.mf.BPRMatrixFactorizationUDTF.java

private final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer inputBuf = this.inputBuf;
    final NioFixedSegment fileIO = this.fileIO;
    assert (inputBuf != null);
    assert (fileIO != null);
    final long numTrainingExamples = count;

    final Reporter reporter = getReporter();
    final Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.mf.BPRMatrixFactorization$Counter", "iteration");

    try {
        if (lastWritePos == 0) {// run iterations w/o temporary file
            if (inputBuf.position() == 0) {
                return; // no training example
            }
            inputBuf.flip();

            int iter = 2;
            for (; iter <= iterations; iter++) {
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (inputBuf.remaining() > 0) {
                    int u = inputBuf.getInt();
                    int i = inputBuf.getInt();
                    int j = inputBuf.getInt();
                    // invoke train
                    count++;
                    train(u, i, j);
                }
                cvState.multiplyLoss(0.5d);
                cvState.logState(iter, eta());
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                if (cvState.isLossIncreased()) {
                    etaEstimator.update(1.1f);
                } else {
                    etaEstimator.update(0.5f);
                }
                inputBuf.rewind();
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(count) + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example

            // write training examples in buffer to a temporary file
            if (inputBuf.position() > 0) {
                writeBuffer(inputBuf, fileIO, lastWritePos);
            } else if (lastWritePos == 0) {
                return; // no training example
            }
            try {
                fileIO.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + fileIO.getFile().getAbsolutePath(), e);
            }
            if (LOG.isInfoEnabled()) {
                File tmpFile = fileIO.getFile();
                LOG.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            for (; iter <= iterations; iter++) {
                setCounterValue(iterCounter, iter);

                inputBuf.clear();
                long seekPos = 0L;
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // writes training examples to a buffer in the temporary file
                    final int bytesRead;
                    try {
                        bytesRead = fileIO.read(seekPos, inputBuf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + fileIO.getFile().getAbsolutePath(),
                                e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;
                    seekPos += bytesRead;

                    // reads training examples from a buffer
                    inputBuf.flip();
                    int remain = inputBuf.remaining();
                    assert (remain > 0) : remain;
                    for (; remain >= RECORD_BYTES; remain -= RECORD_BYTES) {
                        int u = inputBuf.getInt();
                        int i = inputBuf.getInt();
                        int j = inputBuf.getInt();
                        // invoke train
                        count++;
                        train(u, i, j);
                    }
                    inputBuf.compact();
                }
                cvState.multiplyLoss(0.5d);
                cvState.logState(iter, eta());
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                if (cvState.isLossIncreased()) {
                    etaEstimator.update(1.1f);
                } else {
                    etaEstimator.update(0.5f);
                }
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples using a secondary storage (thus " + NumberUtils.formatNumber(count)
                    + " training updates in total)");
        }
    } finally {
        // delete the temporary file and release resources
        try {
            fileIO.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + fileIO.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}
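
The in-memory branch flips the buffer once and then replays it on every subsequent iteration with rewind(), which resets the position to zero while leaving the limit set by the original flip() untouched, so the same training examples can be consumed repeatedly.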

From source file:edu.hawaii.soest.kilonalu.flntu.FLNTUSource.java

/**
 * A method that executes the streaming of data from the source to the RBNB
 * server after all configuration of settings, connections to hosts, and
 * thread initialization occurs.  This method contains the detailed code for
 * streaming the data and interpreting the stream.
 */
protected boolean execute() {
    logger.debug("FLNTUSource.execute() called.");
    // do not execute the stream if there is no connection
    if (!isConnected())
        return false;

    boolean failed = false;

    SocketChannel socket = getSocketConnection();

    // while data are being sent, read them into the buffer
    try {
        // create four byte placeholders used to evaluate up to a four-byte 
        // window.  The FIFO layout looks like:
        //           -------------------------
        //   in ---> | One | Two |Three|Four |  ---> out
        //           -------------------------
        byte byteOne = 0x00, // set initial placeholder values
                byteTwo = 0x00, byteThree = 0x00, byteFour = 0x00;

        // Create a buffer that will store the sample bytes as they are read
        ByteBuffer sampleBuffer = ByteBuffer.allocate(getBufferSize());

        // create a byte buffer to store bytes from the TCP stream
        ByteBuffer buffer = ByteBuffer.allocateDirect(getBufferSize());

        // add a channel of data that will be pushed to the server.  
        // Each sample will be sent to the Data Turbine as an rbnb frame.
        ChannelMap rbnbChannelMap = new ChannelMap();

        // while there are bytes to read from the socket ...
        while (socket.read(buffer) != -1 || buffer.position() > 0) {

            // prepare the buffer for reading
            buffer.flip();

            // while there are unread bytes in the ByteBuffer
            while (buffer.hasRemaining()) {
                byteOne = buffer.get();
                logger.debug("char: " + (char) byteOne + "\t" + "b1: "
                        + new String(Hex.encodeHex((new byte[] { byteOne }))) + "\t" + "b2: "
                        + new String(Hex.encodeHex((new byte[] { byteTwo }))) + "\t" + "b3: "
                        + new String(Hex.encodeHex((new byte[] { byteThree }))) + "\t" + "b4: "
                        + new String(Hex.encodeHex((new byte[] { byteFour }))) + "\t" + "sample pos: "
                        + sampleBuffer.position() + "\t" + "sample rem: " + sampleBuffer.remaining() + "\t"
                        + "sample cnt: " + sampleByteCount + "\t" + "buffer pos: " + buffer.position() + "\t"
                        + "buffer rem: " + buffer.remaining() + "\t" + "state: " + state);

                // Use a State Machine to process the byte stream.
                switch (state) {

                case 0:

                    // sample sets begin with 'mvs 1\r\n' and end with 'mvs 0\r\n'.  Find the 
                    // beginning of the sample set using the 4-byte window (s 1\r\n)
                    // note bytes are in reverse order in the FIFO window
                    if (byteOne == 0x0A && byteTwo == 0x0D && byteThree == 0x31 && byteFour == 0x20) {
                        // we've found the beginning of a sample set, move on
                        state = 1;
                        break;

                    } else {
                        break;
                    }

                case 1: // read the rest of the bytes to the next EOL characters

                    // sample line is terminated by record delimiter byte (\r\n)
                    // note bytes are in reverse order in the FIFO window
                    if (byteOne == 0x0A && byteTwo == 0x0D && byteThree == 0x30 && byteFour == 0x20) {

                        // we've found the sample set ending, clear buffers and return
                        // to state 0 to wait for the next set
                        byteOne = 0x00;
                        byteTwo = 0x00;
                        byteThree = 0x00;
                        byteFour = 0x00;
                        sampleBuffer.clear();
                        sampleByteCount = 0;
                        rbnbChannelMap.Clear();
                        logger.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. Cleared rbnbChannelMap.");
                        state = 0;

                        // if we're not at the sample set end, look for individual samples    
                    } else if (byteOne == 0x0A && byteTwo == 0x0D) {

                        // found the sample ending delimiter
                        // add in the sample delimiter to the sample buffer
                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);
                            sampleByteCount++;
                        } else {
                            sampleBuffer.compact();
                            logger.debug("Compacting sampleBuffer ...");
                            sampleBuffer.put(byteOne);
                            sampleByteCount++;

                        }

                        // extract just the length of the sample bytes out of the
                        // sample buffer, and place it in the channel map as a 
                        // byte array.  Then, send it to the data turbine.
                        byte[] sampleArray = new byte[sampleByteCount];
                        sampleBuffer.flip();
                        sampleBuffer.get(sampleArray);

                        // send the sample to the data turbine
                        rbnbChannelMap.PutTimeAuto("server");
                        String sampleString = new String(sampleArray, "US-ASCII");
                        int channelIndex = rbnbChannelMap.Add(getRBNBChannelName());
                        rbnbChannelMap.PutMime(channelIndex, "text/plain");
                        rbnbChannelMap.PutDataAsString(channelIndex, sampleString);
                        getSource().Flush(rbnbChannelMap);
                        logger.info("Sample: " + sampleString.substring(0, sampleString.length() - 2)
                                + " sent data to the DataTurbine. ");
                        byteOne = 0x00;
                        byteTwo = 0x00;
                        byteThree = 0x00;
                        byteFour = 0x00;
                        sampleBuffer.clear();
                        sampleByteCount = 0;
                        rbnbChannelMap.Clear();
                        logger.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. Cleared rbnbChannelMap.");
                        break;

                    } else { // not 0x0

                        // still in the middle of the sample, keep adding bytes
                        sampleByteCount++; // add each byte found

                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);
                        } else {
                            sampleBuffer.compact();
                            logger.debug("Compacting sampleBuffer ...");
                            sampleBuffer.put(byteOne);

                        }

                        break;
                    } // end if for 0x0D20 EOL

                } // end switch statement

                // shift the bytes in the FIFO window
                byteFour = byteThree;
                byteThree = byteTwo;
                byteTwo = byteOne;

            } //end while (more unread bytes)

            // prepare the buffer to read in more bytes from the stream
            buffer.compact();

        } // end while (more socket bytes to read)
        socket.close();

    } catch (IOException e) {
        // handle exceptions
        // In the event of an i/o exception, log the exception, and allow execute()
        // to return false, which will prompt a retry.
        failed = true;
        e.printStackTrace();
        return !failed;
    } catch (SAPIException sapie) {
        // In the event of an RBNB communication  exception, log the exception, 
        // and allow execute() to return false, which will prompt a retry.
        failed = true;
        sapie.printStackTrace();
        return !failed;
    }

    return !failed;
}
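
Stripped of the instrument-specific state machine, the outer loop above is the canonical flip()/compact() pattern for draining a channel: flip() exposes the bytes just read, and compact() carries any unconsumed remainder over to the next read. A skeleton sketch, where process() stands in for a hypothetical per-byte handler:

while (socket.read(buffer) != -1 || buffer.position() > 0) {
    buffer.flip();              // expose [0, bytesRead) for consumption
    while (buffer.hasRemaining()) {
        process(buffer.get());  // hypothetical handler for each byte
    }
    buffer.compact();           // preserve leftovers, resume write mode
}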

From source file:io.warp10.continuum.gts.GTSDecoder.java

/**
 * Attempts to read the next measurement and associated metadata (timestamp, location, elevation).
 * @return true if a measurement was successfully read, false if none were left in the buffer.
 */
public boolean next() {

    //
    // Update position prior to reading the next value so the start of this
    // entry can be located later.
    //

    this.position = this.buffer.position();

    if (!buffer.hasRemaining()) {
        return false;
    }

    this.nextCalled = true;

    //
    // Read timestamp/type flag
    //

    byte tsTypeFlag = buffer.get();

    //
    // Check if we encountered encrypted data
    //

    if (GTSEncoder.FLAGS_ENCRYPTED == (tsTypeFlag & GTSEncoder.FLAGS_MASK_ENCRYPTED)) {
        //
        // Extract encrypted length
        //

        int enclen = (int) Varint.decodeUnsignedLong(buffer);

        //
        // If there is no decryption key, simply skip the encrypted data
        // and call next recursively.
        //

        if (null == wrappingKey) {
            buffer.position(buffer.position() + enclen);

            // WARNING(hbs): if there are many encrypted chunks this may lead to a stack overflow
            return next();
        }

        byte[] encrypted = new byte[enclen];
        buffer.get(encrypted);

        //
        // Decrypt the encrypted data
        //

        AESWrapEngine engine = new AESWrapEngine();
        CipherParameters params = new KeyParameter(this.wrappingKey);
        engine.init(false, params);

        try {
            byte[] decrypted = engine.unwrap(encrypted, 0, encrypted.length);
            //
            // Unpad the decrypted data
            //

            PKCS7Padding padding = new PKCS7Padding();
            int padcount = padding.padCount(decrypted);

            //
            // Replace the current buffer with a new one containing the
            // decrypted data followed by any remaining data in the original
            // buffer.
            //

            ByteBuffer bb = ByteBuffer.allocate(decrypted.length - padcount + this.buffer.remaining());

            bb.put(decrypted, 0, decrypted.length - padcount);
            bb.put(this.buffer);
            bb.flip();

            this.buffer = bb;
        } catch (InvalidCipherTextException icte) {
            // FIXME(hbs): log this somewhere...
            //
            // Skip the encrypted chunk we failed to decrypt
            //
        }

        //
        // Call next recursively
        //
        // WARNING(hbs): we may hit StackOverflow in some cases

        return next();
    }

    //
    // Read location/elevation flag if needed
    //

    byte locElevFlag = 0x0;

    if (GTSEncoder.FLAGS_CONTINUATION == (tsTypeFlag & GTSEncoder.FLAGS_CONTINUATION)) {
        if (!buffer.hasRemaining()) {
            return false;
        }

        locElevFlag = buffer.get();
    }

    //
    // Read timestamp
    //

    switch (tsTypeFlag & GTSEncoder.FLAGS_MASK_TIMESTAMP) {
    case GTSEncoder.FLAGS_TIMESTAMP_RAW_ABSOLUTE: {
        ByteOrder order = buffer.order();
        buffer.order(ByteOrder.BIG_ENDIAN);
        previousLastTimestamp = lastTimestamp;
        lastTimestamp = buffer.getLong();
        buffer.order(order);
    }
        break;
    //case GTSEncoder.FLAGS_TIMESTAMP_ZIGZAG_ABSOLUTE:
    //  previousLastTimestamp = lastTimestamp;
    //  lastTimestamp = Varint.decodeSignedLong(buffer);
    //  break;
    case GTSEncoder.FLAGS_TIMESTAMP_EQUALS_BASE:
        previousLastTimestamp = lastTimestamp;
        lastTimestamp = baseTimestamp;
        break;
    case GTSEncoder.FLAGS_TIMESTAMP_ZIGZAG_DELTA_BASE: {
        long delta = Varint.decodeSignedLong(buffer);
        previousLastTimestamp = lastTimestamp;
        lastTimestamp = baseTimestamp + delta;
    }
        break;
    case GTSEncoder.FLAGS_TIMESTAMP_ZIGZAG_DELTA_PREVIOUS: {
        long delta = Varint.decodeSignedLong(buffer);
        previousLastTimestamp = lastTimestamp;
        lastTimestamp = lastTimestamp + delta;
    }
        break;
    default:
        throw new RuntimeException("Invalid timestamp format.");
    }

    //
    // Read location/elevation
    //

    if (GTSEncoder.FLAGS_LOCATION == (locElevFlag & GTSEncoder.FLAGS_LOCATION)) {
        if (GTSEncoder.FLAGS_LOCATION_IDENTICAL != (locElevFlag & GTSEncoder.FLAGS_LOCATION_IDENTICAL)) {
            if (GTSEncoder.FLAGS_LOCATION_GEOXPPOINT_ZIGZAG_DELTA == (locElevFlag
                    & GTSEncoder.FLAGS_LOCATION_GEOXPPOINT_ZIGZAG_DELTA)) {
                long delta = Varint.decodeSignedLong(buffer);
                previousLastGeoXPPoint = lastGeoXPPoint;
                lastGeoXPPoint = lastGeoXPPoint + delta;
            } else {
                ByteOrder order = buffer.order();
                buffer.order(ByteOrder.BIG_ENDIAN);
                previousLastGeoXPPoint = lastGeoXPPoint;
                lastGeoXPPoint = buffer.getLong();
                buffer.order(order);
            }
        }
    } else {
        previousLastGeoXPPoint = lastGeoXPPoint;
        lastGeoXPPoint = GeoTimeSerie.NO_LOCATION;
    }

    if (GTSEncoder.FLAGS_ELEVATION == (locElevFlag & GTSEncoder.FLAGS_ELEVATION)) {
        if (GTSEncoder.FLAGS_ELEVATION_IDENTICAL != (locElevFlag & GTSEncoder.FLAGS_ELEVATION_IDENTICAL)) {
            boolean zigzag = GTSEncoder.FLAGS_ELEVATION_ZIGZAG == (locElevFlag
                    & GTSEncoder.FLAGS_ELEVATION_ZIGZAG);

            long encoded;

            if (zigzag) {
                encoded = Varint.decodeSignedLong(buffer);
            } else {
                ByteOrder order = buffer.order();
                buffer.order(ByteOrder.BIG_ENDIAN);
                encoded = buffer.getLong();
                buffer.order(order);
            }

            if (GTSEncoder.FLAGS_ELEVATION_DELTA_PREVIOUS == (locElevFlag
                    & GTSEncoder.FLAGS_ELEVATION_DELTA_PREVIOUS)) {
                previousLastElevation = lastElevation;
                lastElevation = lastElevation + encoded;
            } else {
                previousLastElevation = lastElevation;
                lastElevation = encoded;
            }
        }
    } else {
        previousLastElevation = lastElevation;
        lastElevation = GeoTimeSerie.NO_ELEVATION;
    }

    //
    // Extract value
    //

    switch (tsTypeFlag & GTSEncoder.FLAGS_MASK_TYPE) {
    case GTSEncoder.FLAGS_TYPE_LONG:
        lastType = TYPE.LONG;
        if (GTSEncoder.FLAGS_VALUE_IDENTICAL != (tsTypeFlag & GTSEncoder.FLAGS_VALUE_IDENTICAL)) {
            long encoded;

            if (GTSEncoder.FLAGS_LONG_ZIGZAG == (tsTypeFlag & GTSEncoder.FLAGS_LONG_ZIGZAG)) {
                encoded = Varint.decodeSignedLong(buffer);
            } else {
                ByteOrder order = buffer.order();
                buffer.order(ByteOrder.BIG_ENDIAN);
                encoded = buffer.getLong();
                buffer.order(order);
            }

            if (GTSEncoder.FLAGS_LONG_DELTA_PREVIOUS == (tsTypeFlag & GTSEncoder.FLAGS_LONG_DELTA_PREVIOUS)) {
                previousLastLongValue = lastLongValue;
                lastLongValue = lastLongValue + encoded;
            } else {
                previousLastLongValue = lastLongValue;
                lastLongValue = encoded;
            }
        } else {
            previousLastLongValue = lastLongValue;
        }
        break;

    case GTSEncoder.FLAGS_TYPE_DOUBLE:
        lastType = TYPE.DOUBLE;
        if (GTSEncoder.FLAGS_VALUE_IDENTICAL != (tsTypeFlag & GTSEncoder.FLAGS_VALUE_IDENTICAL)) {
            if (GTSEncoder.FLAGS_DOUBLE_IEEE754 == (tsTypeFlag & GTSEncoder.FLAGS_DOUBLE_IEEE754)) {
                ByteOrder order = buffer.order();
                buffer.order(ByteOrder.BIG_ENDIAN);
                previousLastDoubleValue = lastDoubleValue;
                lastDoubleValue = buffer.getDouble();
                previousLastBDValue = lastBDValue;
                lastBDValue = null;
                buffer.order(order);
            } else {
                int scale = buffer.get();
                long unscaled = Varint.decodeSignedLong(buffer);
                previousLastBDValue = lastBDValue;
                lastBDValue = new BigDecimal(new BigInteger(Long.toString(unscaled)), scale);
            }
        } else {
            previousLastDoubleValue = lastDoubleValue;
            previousLastBDValue = lastBDValue;
        }
        break;

    case GTSEncoder.FLAGS_TYPE_STRING:
        lastType = TYPE.STRING;
        if (GTSEncoder.FLAGS_VALUE_IDENTICAL != (tsTypeFlag & GTSEncoder.FLAGS_VALUE_IDENTICAL)) {
            // Decode String length
            long len = Varint.decodeUnsignedLong(buffer);

            // Prevent excessive allocation
            if (len > buffer.remaining()) {
                throw new RuntimeException("Invalid string length.");
            }

            byte[] utf8 = new byte[(int) len];
            // Read String UTF8 representation
            buffer.get(utf8);
            previousLastStringValue = lastStringValue;
            lastStringValue = new String(utf8, Charsets.UTF_8);
        } else {
            previousLastStringValue = lastStringValue;
        }
        break;

    case GTSEncoder.FLAGS_TYPE_BOOLEAN:
        if (GTSEncoder.FLAGS_DELETE_MARKER == (tsTypeFlag & GTSEncoder.FLAGS_MASK_TYPE_FLAGS)) {
            lastType = TYPE.UNDEFINED;
        } else {
            lastType = TYPE.BOOLEAN;

            if (GTSEncoder.FLAGS_BOOLEAN_VALUE_TRUE == (tsTypeFlag & GTSEncoder.FLAGS_MASK_TYPE_FLAGS)) {
                lastBooleanValue = true;
            } else if (GTSEncoder.FLAGS_BOOLEAN_VALUE_FALSE == (tsTypeFlag
                    & GTSEncoder.FLAGS_MASK_TYPE_FLAGS)) {
                lastBooleanValue = false;
            } else {
                throw new RuntimeException("Invalid boolean value.");
            }
            //lastBooleanValue = GTSEncoder.FLAGS_BOOLEAN_VALUE == (tsTypeFlag & GTSEncoder.FLAGS_BOOLEAN_VALUE);
        }
        break;

    default:
        throw new RuntimeException("Invalid type encountered!");
    }

    return true;
}
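
Note the single flip() in the decryption branch: after the decrypted bytes and the remaining undecoded stream are copied into the freshly allocated bb, flip() switches it into read mode before it replaces this.buffer, so subsequent get() calls see exactly the copied bytes and nothing more.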

From source file:edu.hawaii.soest.kilonalu.adcp.EnsembleFixedLeader.java

/**
 *  Constructor.  This method populates the Fixed Leader fields from 
 *  the given ByteBuffer of data passed in as an argument, based on metadata 
 *  found in the EnsembleHeader.
 *
 * @param ensembleBuffer the ByteBuffer that contains the binary ensemble data
 * @param ensemble  the parent ensemble for this fixed leader
 */
public EnsembleFixedLeader(ByteBuffer ensembleBuffer, Ensemble ensemble) {

    // prepare the ensemble buffer for reading
    ensembleBuffer.flip();
    ensembleBuffer.limit(ensembleBuffer.capacity());

    // position the cursor at the correct offset given the sequential location
    // of the fixed leader in the data stream.
    int typeNumber = ensemble.getDataTypeNumber(EnsembleDataType.FIXED_LEADER);
    int offset = ensemble.getDataTypeOffset(typeNumber);
    ensembleBuffer.position(offset);

    // define the temporary arrays for passing bytes
    byte[] oneByte = new byte[1];
    byte[] twoBytes = new byte[2];

    // set all of the FixedLeader fields in the order that they are read from 
    // the byte stream
    ensembleBuffer.get(twoBytes);
    setFixedLeaderID(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(oneByte);
    setCpuFirmwareVersion(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setCpuFirmwareRevision(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(twoBytes);
    setSystemConfiguration(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(oneByte);
    setPdRealOrSimulatedFlag(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setLagLength(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setNumberOfBeams(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setNumberOfCells(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(twoBytes);
    setPingsPerEnsemble(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(twoBytes);
    setDepthCellLength(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(twoBytes);
    setBlankAfterTransmit(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(oneByte);
    setProfilingMode(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setLowCorrelationThreshold(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setNumberOfCodeRepetitions(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setPercentGoodMinimum(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(twoBytes);
    setErrorVelocityThreshold(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(oneByte);
    setPingMinutes(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setPingSeconds(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setPingHundredths(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setCoordinateTransformParams(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(twoBytes);
    setHeadingAlignment(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(twoBytes);
    setHeadingBias(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(oneByte);
    setSensorSource(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setSensorAvailability(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(twoBytes);
    setBinOneDistance(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(twoBytes);
    setTransmitPulseLength(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(oneByte);
    setReferenceLayerStart(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setReferenceLayerEnd(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setFalseTargetThreshold(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setFixedLeaderSpare(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(twoBytes);
    setTransmitLagDistance(twoBytes);
    ensemble.addToByteSum(twoBytes);
    byte[] boardSerialNumber = new byte[8];
    ensembleBuffer.get(boardSerialNumber); // read 8 bytes
    setCpuBoardSerialNumber(boardSerialNumber);
    ensemble.addToByteSum(boardSerialNumber);
    ensembleBuffer.get(twoBytes);
    setSystemBandwidth(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(oneByte);
    setSystemPower(oneByte);
    ensemble.addToByteSum(oneByte);

    // the following don't get called for Workhorse ADCPs
    // TODO: test for model and add fields if necessary

    //ensembleBuffer.get(oneByte);
    //setBaseFrequencyIndex(oneByte);
    //ensemble.addToByteSum(oneByte);
    //byte[] instrumentSerialNumber = new byte[4];
    //ensembleBuffer.get(instrumentSerialNumber);  // read 4 bytes
    //setSerialNumber(instrumentSerialNumber);
    //ensemble.addToByteSum(instrumentSerialNumber);
    //ensembleBuffer.get(oneByte);
    //setBeamAngle(oneByte);
    //ensemble.addToByteSum(oneByte);

}
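
Note the first two calls in this constructor: flip() sets the limit to the current position and the position to zero, and the immediate limit(ensembleBuffer.capacity()) then restores the full limit, so the pair amounts to rewinding the buffer before position(offset) seeks to the fixed leader.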

From source file:hivemall.recommend.SlimUDTF.java

private void runIterativeTraining() throws HiveException {
    final ByteBuffer buf = this._inputBuf;
    final NioStatefulSegment dst = this._fileIO;
    assert (buf != null);
    assert (dst != null);

    final Reporter reporter = getReporter();
    final Counters.Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.recommend.slim$Counter", "iteration");

    try {
        if (dst.getPosition() == 0L) {// run iterations w/o temporary file
            if (buf.position() == 0) {
                return; // no training example
            }
            buf.flip();
            for (int iter = 2; iter < numIterations; iter++) {
                _cvState.next();
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (buf.remaining() > 0) {
                    int recordBytes = buf.getInt();
                    assert (recordBytes > 0) : recordBytes;
                    replayTrain(buf);
                }
                buf.rewind();
                if (_cvState.isConverged(_observedTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + _cvState.getCurrentIteration() + " iterations of "
                    + NumberUtils.formatNumber(_observedTrainingExamples)
                    + " training examples on memory (thus "
                    + NumberUtils.formatNumber(_observedTrainingExamples * _cvState.getCurrentIteration())
                    + " training updates in total) ");

        } else { // read training examples in the temporary file and invoke train for each example
            // write KNNi in buffer to a temporary file
            if (buf.remaining() > 0) {
                writeBuffer(buf, dst);
            }

            try {
                dst.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + dst.getFile().getAbsolutePath(), e);
            }

            if (logger.isInfoEnabled()) {
                File tmpFile = dst.getFile();
                logger.info("Wrote KNN entries of axis items to a temporary file for iterative training: "
                        + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            for (int iter = 2; iter < numIterations; iter++) {
                _cvState.next();
                setCounterValue(iterCounter, iter);

                buf.clear();
                dst.resetPosition();
                while (true) {
                    reportProgress(reporter);
                    // load a KNNi to a buffer in the temporary file
                    final int bytesRead;
                    try {
                        bytesRead = dst.read(buf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + dst.getFile().getAbsolutePath(), e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;

                    // reads training examples from a buffer
                    buf.flip();
                    int remain = buf.remaining();
                    if (remain < SizeOf.INT) {
                        throw new HiveException("Illegal file format was detected");
                    }
                    while (remain >= SizeOf.INT) {
                        int pos = buf.position();
                        int recordBytes = buf.getInt();
                        remain -= SizeOf.INT;
                        if (remain < recordBytes) {
                            buf.position(pos);
                            break;
                        }

                        replayTrain(buf);
                        remain -= recordBytes;
                    }
                    buf.compact();
                }
                if (_cvState.isConverged(_observedTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + _cvState.getCurrentIteration() + " iterations of "
                    + NumberUtils.formatNumber(_observedTrainingExamples)
                    + " training examples on memory and KNNi data on secondary storage (thus "
                    + NumberUtils.formatNumber(_observedTrainingExamples * _cvState.getCurrentIteration())
                    + " training updates in total) ");

        }
    } catch (Throwable e) {
        throw new HiveException("Exception caused in the iterative training", e);
    } finally {
        // delete the temporary file and release resources
        try {
            dst.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + dst.getFile().getAbsolutePath(), e);
        }
        this._inputBuf = null;
        this._fileIO = null;
    }
}

From source file:net.kungfoo.grizzly.proxy.impl.ConnectingHandler.java

public void outputReady(final NHttpClientConnection conn, final ContentEncoder encoder) {
    System.out.println(conn + " [proxy->origin] output ready");

    HttpContext context = conn.getContext();
    ProxyProcessingInfo proxyTask = (ProxyProcessingInfo) context.getAttribute(ProxyProcessingInfo.ATTRIB);

    synchronized (proxyTask) {
        ConnState connState = proxyTask.getOriginState();
        if (connState != ConnState.REQUEST_SENT && connState != ConnState.REQUEST_BODY_STREAM) {
            throw new IllegalStateException("Illegal target connection state: " + connState);
        }

        try {

            // TODO: proper handling of POST
            ByteBuffer src = proxyTask.getInBuffer();
            final int srcSize = src.limit();
            if (src.position() != 0) {
                System.out.println(conn + " [proxy->origin] buff not consumed yet");
                return;
            }
            ByteChunk chunk = new ByteChunk(srcSize);
            Request originalRequest = proxyTask.getOriginalRequest();
            int read;
            int encRead = 0;
            long bytesWritten = 0;
            while ((read = originalRequest.doRead(chunk)) != -1) {
                System.out.println(conn + " [proxy->origin] " + read + " bytes read");
                if (read > srcSize) {
                    src = ByteBuffer.wrap(chunk.getBytes(), chunk.getOffset(), read);
                } else {
                    src.put(chunk.getBytes(), chunk.getOffset(), read);
                }
                src.flip();
                encRead = encoder.write(src);
                bytesWritten += encRead;
                src.compact();
                chunk.reset();
                if (encRead == 0) {
                    System.out.println(conn + " [proxy->origin] encoder refused to consume more");
                    break;
                } else {
                    System.out.println(conn + " [proxy->origin] " + encRead + " consumed by encoder");
                }
            }
            System.out.println(conn + " [proxy->origin] " + bytesWritten + " bytes written");
            System.out.println(conn + " [proxy->origin] " + encoder);
            src.compact();

            if (src.position() == 0 && encRead != 0) {
                encoder.complete();
            }
            // Update connection state
            if (encoder.isCompleted()) {
                System.out.println(conn + " [proxy->origin] request body sent");
                proxyTask.setOriginState(ConnState.REQUEST_BODY_DONE);
            } else {
                proxyTask.setOriginState(ConnState.REQUEST_BODY_STREAM);
            }

        } catch (IOException ex) {
            shutdownConnection(conn);
        }
    }
}

From source file:hivemall.topicmodel.ProbabilisticTopicModelBaseUDTF.java

protected final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer buf = this.inputBuf;
    final NioStatefulSegment dst = this.fileIO;
    assert (buf != null);
    assert (dst != null);
    final long numTrainingExamples = model.getDocCount();

    long numTrain = numTrainingExamples / miniBatchSize;
    if (numTrainingExamples % miniBatchSize != 0L) {
        numTrain++;
    }

    final Reporter reporter = getReporter();
    final Counters.Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.topicmodel.ProbabilisticTopicModel$Counter", "iteration");

    try {
        if (dst.getPosition() == 0L) {// run iterations w/o temporary file
            if (buf.position() == 0) {
                return; // no training example
            }
            buf.flip();

            int iter = 2;
            float perplexity = cumPerplexity / numTrain;
            float perplexityPrev;
            for (; iter <= iterations; iter++) {
                perplexityPrev = perplexity;
                cumPerplexity = 0.f;

                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (buf.remaining() > 0) {
                    int recordBytes = buf.getInt();
                    assert (recordBytes > 0) : recordBytes;
                    int wcLength = buf.getInt();
                    final String[] wordCounts = new String[wcLength];
                    for (int j = 0; j < wcLength; j++) {
                        wordCounts[j] = NIOUtils.getString(buf);
                    }
                    update(wordCounts);
                }
                buf.rewind();

                // mean perplexity over `numTrain` mini-batches
                perplexity = cumPerplexity / numTrain;
                logger.info("Mean perplexity over mini-batches: " + perplexity);
                if (Math.abs(perplexityPrev - perplexity) < eps) {
                    break;
                }
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(numTrainingExamples * Math.min(iter, iterations))
                    + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example
            // write training examples in buffer to a temporary file
            if (buf.remaining() > 0) {
                writeBuffer(buf, dst);
            }
            try {
                dst.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + dst.getFile().getAbsolutePath(), e);
            }
            if (logger.isInfoEnabled()) {
                File tmpFile = dst.getFile();
                logger.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            float perplexity = cumPerplexity / numTrain;
            float perplexityPrev;
            for (; iter <= iterations; iter++) {
                perplexityPrev = perplexity;
                cumPerplexity = 0.f;

                setCounterValue(iterCounter, iter);

                buf.clear();
                dst.resetPosition();
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // writes training examples to a buffer in the temporary file
                    final int bytesRead;
                    try {
                        bytesRead = dst.read(buf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + dst.getFile().getAbsolutePath(), e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;

                    // reads training examples from a buffer
                    buf.flip();
                    int remain = buf.remaining();
                    if (remain < SizeOf.INT) {
                        throw new HiveException("Illegal file format was detected");
                    }
                    while (remain >= SizeOf.INT) {
                        int pos = buf.position();
                        int recordBytes = buf.getInt();
                        remain -= SizeOf.INT;
                        if (remain < recordBytes) {
                            buf.position(pos);
                            break;
                        }

                        int wcLength = buf.getInt();
                        final String[] wordCounts = new String[wcLength];
                        for (int j = 0; j < wcLength; j++) {
                            wordCounts[j] = NIOUtils.getString(buf);
                        }
                        update(wordCounts);

                        remain -= recordBytes;
                    }
                    buf.compact();
                }

                // mean perplexity over `numTrain` mini-batches
                perplexity = cumPerplexity / numTrain;
                logger.info("Mean perplexity over mini-batches: " + perplexity);
                if (Math.abs(perplexityPrev - perplexity) < eps) {
                    break;
                }
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples on a secondary storage (thus "
                    + NumberUtils.formatNumber(numTrainingExamples * Math.min(iter, iterations))
                    + " training updates in total)");
        }
    } catch (Throwable e) {
        throw new HiveException("Exception caused in the iterative training", e);
    } finally {
        // delete the temporary file and release resources
        try {
            dst.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + dst.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}
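
The SlimUDTF and ProbabilisticTopicModelBaseUDTF examples above share the same framing discipline when replaying length-prefixed records from a refilled buffer: after flip(), each record's length is read first, and if the full record has not yet arrived the position is rolled back so that compact() carries the partial record into the next read. The core of that loop, as a sketch where readRecord() is a hypothetical consumer of exactly recordBytes bytes:

buf.flip();                         // expose the bytes just read from the file
int remain = buf.remaining();
while (remain >= SizeOf.INT) {
    int pos = buf.position();
    int recordBytes = buf.getInt(); // length prefix
    remain -= SizeOf.INT;
    if (remain < recordBytes) {
        buf.position(pos);          // incomplete record: roll back to its start
        break;
    }
    readRecord(buf);                // hypothetical: consumes exactly recordBytes bytes
    remain -= recordBytes;
}
buf.compact();                      // keep any partial record for the next read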