List of usage examples for java.nio.ByteBuffer.compact(). The compact() method copies the bytes between the buffer's position and its limit to the beginning of the buffer and leaves the position just after them, so the buffer can be filled again without losing data that has not yet been read or written.
public abstract ByteBuffer compact();
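The canonical use of compact() is a copy loop that drains a buffer into one channel while any bytes the write did not accept stay in the buffer for the next pass. The following is a minimal, self-contained sketch of that idiom; the class name and buffer size are placeholders rather than code taken from any of the examples below.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;

public class CompactCopy {

    // Copies all bytes from src to dst through a single reusable buffer.
    public static void copy(ReadableByteChannel src, WritableByteChannel dst) throws IOException {
        ByteBuffer buffer = ByteBuffer.allocate(8 * 1024);
        while (src.read(buffer) != -1) {
            buffer.flip();        // switch from filling to draining
            dst.write(buffer);    // may write fewer bytes than remaining()
            buffer.compact();     // keep any unwritten bytes, ready to fill again
        }
        // drain whatever is left after end-of-stream
        buffer.flip();
        while (buffer.hasRemaining()) {
            dst.write(buffer);
        }
    }
}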
From source file:eu.stratosphere.nephele.services.iomanager.IOManagerPerformanceBenchmark.java
@SuppressWarnings("resource")
private final void speedTestNIO(int bufferSize, boolean direct) throws IOException {
    final Channel.ID tmpChannel = ioManager.createChannel();
    File tempFile = null;
    FileChannel fs = null;

    try {
        tempFile = new File(tmpChannel.getPath());
        RandomAccessFile raf = new RandomAccessFile(tempFile, "rw");
        fs = raf.getChannel();
        ByteBuffer buf = direct ? ByteBuffer.allocateDirect(bufferSize) : ByteBuffer.allocate(bufferSize);

        long writeStart = System.currentTimeMillis();

        int valsLeft = NUM_INTS_WRITTEN;
        while (valsLeft-- > 0) {
            if (buf.remaining() < 4) {
                buf.flip();
                fs.write(buf);
                buf.clear();
            }
            buf.putInt(valsLeft);
        }

        if (buf.position() > 0) {
            buf.flip();
            fs.write(buf);
        }

        fs.close();
        raf.close();
        fs = null;

        long writeElapsed = System.currentTimeMillis() - writeStart;

        // ----------------------------------------------------------------

        raf = new RandomAccessFile(tempFile, "r");
        fs = raf.getChannel();
        buf.clear();

        long readStart = System.currentTimeMillis();

        fs.read(buf);
        buf.flip();

        valsLeft = NUM_INTS_WRITTEN;
        while (valsLeft-- > 0) {
            if (buf.remaining() < 4) {
                buf.compact();
                fs.read(buf);
                buf.flip();
            }
            if (buf.getInt() != valsLeft) {
                throw new IOException();
            }
        }

        fs.close();
        raf.close();

        long readElapsed = System.currentTimeMillis() - readStart;

        LOG.info("NIO Channel with buffer " + bufferSize + ": write " + writeElapsed + " msecs, read "
                + readElapsed + " msecs.");
    } finally {
        // close if possible
        if (fs != null) {
            fs.close();
            fs = null;
        }
        // try to delete the file
        if (tempFile != null) {
            tempFile.delete();
        }
    }
}
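The read half of this benchmark is the part that uses compact(): whenever fewer than four bytes remain, the partial int is moved to the front of the buffer and the channel is read again behind it. A stripped-down sketch of just that pattern, with an illustrative class name, file handling via java.nio.file, and buffer size, none of which come from the benchmark:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

public class IntReader {

    // Reads 'count' ints that were written sequentially to the given file.
    static void readInts(Path file, int count) throws IOException {
        try (FileChannel channel = FileChannel.open(file, StandardOpenOption.READ)) {
            ByteBuffer buf = ByteBuffer.allocate(4096);
            channel.read(buf);
            buf.flip();                            // prepare for getInt()
            for (int i = 0; i < count; i++) {
                if (buf.remaining() < Integer.BYTES) {
                    buf.compact();                 // move the partial int to the front
                    channel.read(buf);             // refill the buffer behind it
                    buf.flip();                    // back to draining mode
                }
                int value = buf.getInt();
                // ... use value ...
            }
        }
    }
}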
From source file:co.elastic.tealess.SSLChecker.java
private void checkHandshake(SSLReport sslReport, SocketChannel socket) { final InetSocketAddress address = sslReport.getAddress(); final String name = sslReport.getHostname(); IOObserver ioObserver = new IOObserver(); ObservingSSLEngine sslEngine = new ObservingSSLEngine(ctx.createSSLEngine(name, address.getPort()), ioObserver);// www .j a v a2s.com sslReport.setIOObserver(ioObserver); sslEngine.setUseClientMode(true); try { sslEngine.beginHandshake(); } catch (SSLException e) { sslReport.setFailed(e); Throwable cause = Blame.get(e); logger.warn("beginHandshake failed: [{}] {}", cause.getClass(), cause.getMessage()); } // TODO: Is this enough bytes? int size = sslEngine.getSession().getApplicationBufferSize() * 2; ByteBuffer localText = ByteBuffer.allocate(size); ByteBuffer localWire = ByteBuffer.allocate(size); ByteBuffer peerText = ByteBuffer.allocate(size); ByteBuffer peerWire = ByteBuffer.allocate(size); // TODO: I wonder... do we need to send any data at all? localText.put("SSL TEST. HELLO.".getBytes()); localText.flip(); SSLEngineResult result; logger.info("Starting SSL handshake [{}] ", address); try { SSLEngineResult.HandshakeStatus state; state = sslEngine.getHandshakeStatus(); while (state != FINISHED) { // XXX: Use a Selector to wait for data. //logger.trace("State: {} [{}]", state, address); switch (state) { case NEED_TASK: sslEngine.getDelegatedTask().run(); state = sslEngine.getHandshakeStatus(); break; case NEED_WRAP: localWire.clear(); result = sslEngine.wrap(localText, localWire); state = result.getHandshakeStatus(); localWire.flip(); while (localWire.hasRemaining()) { socket.write(localWire); //logger.trace("Sent {} bytes [{}]", bytes, address); } localWire.compact(); break; case NEED_UNWRAP: // Try reading until we get data. Selector selector = Selector.open(); while (peerWire.position() == 0) { socket.read(peerWire); try { Thread.currentThread().sleep(5); } catch (InterruptedException e) { e.printStackTrace(); } } System.out.println("Read " + peerWire.position() + " bytes"); peerWire.flip(); result = sslEngine.unwrap(peerWire, peerText); state = result.getHandshakeStatus(); peerWire.compact(); break; } } } catch (IOException e) { sslReport.setFailed(e); sslReport.setSSLSession(sslEngine.getHandshakeSession()); sslReport.setPeerCertificateDetails(peerCertificateDetails); logger.warn("beginHandshake failed", e); return; } logger.info("handshake completed [{}]", address); // Handshake OK! sslReport.setSSLSession(sslEngine.getSession()); }
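Here compact() appears on both wire buffers: localWire is compacted after the wrapped handshake bytes are written to the socket, and peerWire is compacted after unwrap() so that a TLS record that arrived only partially is kept for the next read. A reduced sketch of the NEED_UNWRAP step, assuming a blocking, connected SocketChannel (class and method names are illustrative):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.SocketChannel;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLEngineResult;

public class UnwrapStep {

    // Performs one NEED_UNWRAP step of the handshake and returns the new handshake status.
    static SSLEngineResult.HandshakeStatus unwrapOnce(SSLEngine engine, SocketChannel socket,
            ByteBuffer peerWire, ByteBuffer peerText) throws IOException {
        if (socket.read(peerWire) < 0) {
            throw new IOException("connection closed during handshake");
        }
        peerWire.flip();                                  // drain what has arrived so far
        SSLEngineResult result = engine.unwrap(peerWire, peerText);
        peerWire.compact();                               // keep any partial TLS record for the next read
        return result.getHandshakeStatus();
    }
}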
From source file:com.l2jfree.network.mmocore.ReadWriteThread.java
private void readPacket(SelectionKey key) { @SuppressWarnings("unchecked") T con = (T) key.attachment();/* w w w . j a v a 2 s . co m*/ ByteBuffer buf = con.getReadBuffer(); if (buf == null) { buf = getReadBuffer(); buf.clear(); } int readPackets = 0; int readBytes = 0; for (;;) { final int remainingFreeSpace = buf.remaining(); int result = -2; try { result = con.getReadableByteChannel().read(buf); } catch (IOException e) { //error handling goes bellow } switch (result) { case -2: // IOException { closeConnectionImpl(con, true); return; } case -1: // EOS { closeConnectionImpl(con, false); return; } default: { buf.flip(); // try to read as many packets as possible for (;;) { final int startPos = buf.position(); if (readPackets >= getMaxIncomingPacketsPerPass() || readBytes >= getMaxIncomingBytesPerPass()) break; if (!tryReadPacket2(con, buf)) break; readPackets++; readBytes += (buf.position() - startPos); } break; } } // stop reading, if we have reached a config limit if (readPackets >= getMaxIncomingPacketsPerPass() || readBytes >= getMaxIncomingBytesPerPass()) break; // if the buffer wasn't filled completely, we should stop trying as the input channel is empty if (remainingFreeSpace > result) break; // compact the buffer for reusing the remaining bytes if (buf.hasRemaining()) buf.compact(); else buf.clear(); } // check if there are some more bytes in buffer and allocate/compact to prevent content lose. if (buf.hasRemaining()) { if (buf == getReadBuffer()) { con.setReadBuffer(getPooledBuffer().put(getReadBuffer())); } else { buf.compact(); } } else { if (buf == getReadBuffer()) { // no additional buffers used } else { con.setReadBuffer(null); recycleBuffer(buf); } } }
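This reader compacts the buffer only when unparsed bytes from a partially received packet remain; otherwise it clears it, which avoids the copy that compact() performs. A minimal sketch of that decision, with an illustrative helper name:

import java.nio.ByteBuffer;

public class BufferRecycling {

    // Prepares a read buffer for the next channel read after parsing.
    // Call with the buffer in "drain" mode, i.e. after flip().
    static void prepareForNextRead(ByteBuffer buf) {
        if (buf.hasRemaining()) {
            buf.compact();   // copy the unparsed tail to the front; position = tail length
        } else {
            buf.clear();     // nothing left over; just reset position/limit, no copying
        }
        // either way the buffer is now in "fill" mode, ready for channel.read(buf)
    }
}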
From source file:org.lnicholls.galleon.togo.ToGo.java
public boolean Download(Video video, CancelDownload cancelDownload) { ServerConfiguration serverConfiguration = Server.getServer().getServerConfiguration(); GoBackConfiguration goBackConfiguration = Server.getServer().getGoBackConfiguration(); ArrayList videos = new ArrayList(); GetMethod get = null;/* w w w. ja va 2 s . c o m*/ try { URL url = new URL(video.getUrl()); Protocol protocol = new Protocol("https", new TiVoSSLProtocolSocketFactory(), 443); HttpClient client = new HttpClient(); // TODO How to get TiVo address?? client.getHostConfiguration().setHost(url.getHost(), 443, protocol); String password = Tools.decrypt(serverConfiguration.getMediaAccessKey()); if (video.getParentalControls() != null && video.getParentalControls().booleanValue()) { if (serverConfiguration.getPassword() == null) throw new NullPointerException("Parental Controls Password is null"); password = password + Tools.decrypt(serverConfiguration.getPassword()); } Credentials credentials = new UsernamePasswordCredentials("tivo", password); //client.getState().setCredentials("TiVo DVR", url.getHost(), credentials); client.getState().setCredentials(null, url.getHost(), credentials); get = new GetMethod(video.getUrl()); client.executeMethod(get); if (get.getStatusCode() != 200) { log.debug("Status code: " + get.getStatusCode()); return false; } InputStream input = get.getResponseBodyAsStream(); String path = serverConfiguration.getRecordingsPath(); File dir = new File(path); if (!dir.exists()) { dir.mkdirs(); } String name = getFilename(video); File file = null; if (goBackConfiguration.isGroupByShow()) { if (video.getSeriesTitle() != null && video.getSeriesTitle().trim().length() > 0) { path = path + File.separator + clean(video.getSeriesTitle()); File filePath = new File(path); if (!filePath.exists()) filePath.mkdirs(); file = new File(path + File.separator + name); } else file = new File(path + File.separator + name); } else { file = new File(path + File.separator + name); } // TODO Handle retransfers /* if (file.exists() && video.getStatus()!=Video.STATUS_DOWNLOADING) { log.debug("duplicate file: "+file); try { List list = VideoManager.findByPath(file.getCanonicalPath()); if (list!=null && list.size()>0) { video.setDownloadSize(file.length()); video.setDownloadTime(0); video.setPath(file.getCanonicalPath()); video.setStatus(Video.STATUS_DELETED); VideoManager.updateVideo(video); return true; } } catch (HibernateException ex) { log.error("Video update failed", ex); } } */ log.info("Downloading: " + name); WritableByteChannel channel = new FileOutputStream(file, false).getChannel(); long total = 0; double diff = 0.0; ByteBuffer buf = ByteBuffer.allocateDirect(1024 * 4); byte[] bytes = new byte[1024 * 4]; int amount = 0; int index = 0; long target = video.getSize(); long start = System.currentTimeMillis(); long last = start; while (amount == 0 && total < target) { while (amount >= 0 && !cancelDownload.cancel()) { if (index == amount) { amount = input.read(bytes); index = 0; total = total + amount; } while (index < amount && buf.hasRemaining()) { buf.put(bytes[index++]); } buf.flip(); int numWritten = channel.write(buf); if (buf.hasRemaining()) { buf.compact(); } else { buf.clear(); } if ((System.currentTimeMillis() - last > 10000) && (total > 0)) { try { video = VideoManager.retrieveVideo(video.getId()); if (video.getStatus() == Video.STATUS_DOWNLOADING) { diff = (System.currentTimeMillis() - start) / 1000.0; if (diff > 0) { video.setDownloadSize(total); video.setDownloadTime((int) diff); 
VideoManager.updateVideo(video); } } } catch (HibernateException ex) { log.error("Video update failed", ex); } last = System.currentTimeMillis(); } } if (cancelDownload.cancel()) { channel.close(); return false; } } diff = (System.currentTimeMillis() - start) / 1000.0; channel.close(); if (diff != 0) log.info("Download rate=" + (total / 1024) / diff + " KBps"); try { video.setPath(file.getCanonicalPath()); VideoManager.updateVideo(video); } catch (HibernateException ex) { log.error("Video update failed", ex); } } catch (MalformedURLException ex) { Tools.logException(ToGo.class, ex, video.getUrl()); return false; } catch (Exception ex) { Tools.logException(ToGo.class, ex, video.getUrl()); return false; } finally { if (get != null) get.releaseConnection(); } return true; }
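The download loop stages bytes from the HTTP response stream in a direct buffer, writes them to a FileChannel, and compacts when the write was partial. A condensed sketch of that fill/drain cycle, assuming an already-open stream and channel (the names, chunk size, and the drain-before-put check are illustrative, not taken from the method above):

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;

public class StreamToChannel {

    // Copies an InputStream to a channel through a direct buffer, tolerating partial writes.
    static long copy(InputStream in, WritableByteChannel out) throws IOException {
        ByteBuffer buf = ByteBuffer.allocateDirect(4 * 1024);
        byte[] chunk = new byte[1024];       // deliberately smaller than the buffer
        long total = 0;
        int read;
        while ((read = in.read(chunk)) != -1) {
            if (buf.remaining() < read) {
                // not enough room for the new chunk: drain the buffer completely first
                buf.flip();
                while (buf.hasRemaining()) {
                    out.write(buf);
                }
                buf.clear();
            }
            buf.put(chunk, 0, read);
            buf.flip();
            out.write(buf);                  // may write fewer bytes than are remaining
            if (buf.hasRemaining()) {
                buf.compact();               // carry the unwritten tail over to the next pass
            } else {
                buf.clear();
            }
            total += read;
        }
        // flush whatever is still buffered
        buf.flip();
        while (buf.hasRemaining()) {
            out.write(buf);
        }
        return total;
    }
}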
From source file:edu.hawaii.soest.kilonalu.ctd.SBE37Source.java
/** * A method that executes the streaming of data from the source to the RBNB * server after all configuration of settings, connections to hosts, and * thread initiatizing occurs. This method contains the detailed code for * streaming the data and interpreting the stream. *//*from w w w .j a v a2 s. c o m*/ protected boolean execute() { logger.debug("SBE37Source.execute() called."); // do not execute the stream if there is no connection if (!isConnected()) return false; boolean failed = false; // while data are being sent, read them into the buffer try { this.socketChannel = getSocketConnection(); // create four byte placeholders used to evaluate up to a four-byte // window. The FIFO layout looks like: // ------------------------- // in ---> | One | Two |Three|Four | ---> out // ------------------------- byte byteOne = 0x00, // set initial placeholder values byteTwo = 0x00, byteThree = 0x00, byteFour = 0x00; // Create a buffer that will store the sample bytes as they are read ByteBuffer sampleBuffer = ByteBuffer.allocate(getBufferSize()); // create a byte buffer to store bytes from the TCP stream ByteBuffer buffer = ByteBuffer.allocateDirect(getBufferSize()); // create a character string to store characters from the TCP stream StringBuilder responseString = new StringBuilder(); // add a channel of data that will be pushed to the server. // Each sample will be sent to the Data Turbine as an rbnb frame. ChannelMap rbnbChannelMap = new ChannelMap(); int channelIndex = rbnbChannelMap.Add(getRBNBChannelName()); // wake the instrument with an initial take sample command this.command = this.commandPrefix + getInstrumentID() + "TS" + this.commandSuffix; this.sentCommand = queryInstrument(this.command); // verify the instrument ID is correct while (getInstrumentID() == null) { // allow time for the instrument response streamingThread.sleep(2000); buffer.clear(); // send the command and update the sentCommand status this.sentCommand = queryInstrument(this.command); // read the response into the buffer. Note that the streamed bytes // are 8-bit, not 16-bit Unicode characters. Use the US-ASCII // encoding instead. while (this.socketChannel.read(buffer) != -1 || buffer.position() > 0) { buffer.flip(); while (buffer.hasRemaining()) { String nextCharacter = new String(new byte[] { buffer.get() }, "US-ASCII"); responseString.append(nextCharacter); } // look for the command line ending if (responseString.toString().indexOf("S>") > 0) { // parse the ID from the idCommand response int idStartIndex = responseString.indexOf("=") + 2; int idStopIndex = responseString.indexOf("=") + 4; String idString = responseString.substring(idStartIndex, idStopIndex); // test that the ID is a valid number and set the instrument ID if ((new Integer(idString)).intValue() > 0) { setInstrumentID(idString); buffer.clear(); logger.debug("Instrument ID is " + getInstrumentID() + "."); break; } else { logger.debug("Instrument ID \"" + idString + "\" was not set."); } } else { break; } buffer.compact(); if (getInstrumentID() != null) { break; } } } // instrumentID is set // allow time for the instrument response streamingThread.sleep(5000); this.command = this.commandPrefix + getInstrumentID() + this.takeSampleCommand + this.commandSuffix; this.sentCommand = queryInstrument(command); // while there are bytes to read from the socket ... 
while (this.socketChannel.read(buffer) != -1 || buffer.position() > 0) { // prepare the buffer for reading buffer.flip(); // while there are unread bytes in the ByteBuffer while (buffer.hasRemaining()) { byteOne = buffer.get(); //logger.debug("b1: " + new String(Hex.encodeHex((new byte[]{byteOne}))) + "\t" + // "b2: " + new String(Hex.encodeHex((new byte[]{byteTwo}))) + "\t" + // "b3: " + new String(Hex.encodeHex((new byte[]{byteThree}))) + "\t" + // "b4: " + new String(Hex.encodeHex((new byte[]{byteFour}))) + "\t" + // "sample pos: " + sampleBuffer.position() + "\t" + // "sample rem: " + sampleBuffer.remaining() + "\t" + // "sample cnt: " + sampleByteCount + "\t" + // "buffer pos: " + buffer.position() + "\t" + // "buffer rem: " + buffer.remaining() + "\t" + // "state: " + state //); // Use a State Machine to process the byte stream. // Start building an rbnb frame for the entire sample, first by // inserting a timestamp into the channelMap. This time is merely // the time of insert into the data turbine, not the time of // observations of the measurements. That time should be parsed out // of the sample in the Sink client code switch (state) { case 0: // sample line is begun by S> // note bytes are in reverse order in the FIFO window if (byteOne == 0x3E && byteTwo == 0x53) { // we've found the beginning of a sample, move on state = 1; break; } else { break; } case 1: // read the rest of the bytes to the next EOL characters // sample line is terminated by S> // note bytes are in reverse order in the FIFO window if (byteOne == 0x3E && byteTwo == 0x53) { sampleByteCount++; // add the last byte found to the count // add the last byte found to the sample buffer if (sampleBuffer.remaining() > 0) { sampleBuffer.put(byteOne); } else { sampleBuffer.compact(); sampleBuffer.put(byteOne); } // extract just the length of the sample bytes (less 2 bytes // to exclude the 'S>' prompt characters) out of the // sample buffer, and place it in the channel map as a // byte array. Then, send it to the data turbine. byte[] sampleArray = new byte[sampleByteCount - 2]; sampleBuffer.flip(); sampleBuffer.get(sampleArray); // send the sample to the data turbine rbnbChannelMap.PutTimeAuto("server"); String sampleString = new String(sampleArray, "US-ASCII"); rbnbChannelMap.PutMime(channelIndex, "text/plain"); rbnbChannelMap.PutDataAsString(channelIndex, sampleString); getSource().Flush(rbnbChannelMap); logger.info("Sample: " + sampleString); logger.info("flushed data to the DataTurbine. "); byteOne = 0x00; byteTwo = 0x00; byteThree = 0x00; byteFour = 0x00; sampleBuffer.clear(); sampleByteCount = 0; //rbnbChannelMap.Clear(); //logger.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. 
Cleared rbnbChannelMap."); //state = 0; // Once the sample is flushed, take a new sample if (getInstrumentID() != null) { // allow time for the instrument response streamingThread.sleep(2000); this.command = this.commandPrefix + getInstrumentID() + this.takeSampleCommand + this.commandSuffix; this.sentCommand = queryInstrument(command); } } else { // not 0x0A0D // still in the middle of the sample, keep adding bytes sampleByteCount++; // add each byte found if (sampleBuffer.remaining() > 0) { sampleBuffer.put(byteOne); } else { sampleBuffer.compact(); logger.debug("Compacting sampleBuffer ..."); sampleBuffer.put(byteOne); } break; } // end if for 0x0A0D EOL } // end switch statement // shift the bytes in the FIFO window byteFour = byteThree; byteThree = byteTwo; byteTwo = byteOne; } //end while (more unread bytes) // prepare the buffer to read in more bytes from the stream buffer.compact(); } // end while (more socket bytes to read) this.socketChannel.close(); } catch (IOException e) { // handle exceptions // In the event of an i/o exception, log the exception, and allow execute() // to return false, which will prompt a retry. failed = true; e.printStackTrace(); return !failed; } catch (SAPIException sapie) { // In the event of an RBNB communication exception, log the exception, // and allow execute() to return false, which will prompt a retry. failed = true; sapie.printStackTrace(); return !failed; } catch (java.lang.InterruptedException ie) { ie.printStackTrace(); } return !failed; }
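The instrument drivers on this page (this SBE37 source and the ADCP, SocketTextSource, and FLNTU sources below) share one shape: drain the socket buffer through a small state machine, accumulate a sample until its terminator appears, flush it to the DataTurbine, and call compact() on the socket buffer before the next read so that a sample split across two reads is not lost. A generic, much-reduced sketch of that pattern, assuming newline-terminated ASCII samples and an illustrative callback (the real drivers use instrument-specific delimiters and RBNB channel maps):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import java.nio.charset.StandardCharsets;
import java.util.function.Consumer;

public class DelimitedSampleReader {

    // Reads '\n'-terminated ASCII samples from the channel and hands each complete one to the callback.
    static void readSamples(ReadableByteChannel channel, Consumer<String> handleSample) throws IOException {
        ByteBuffer buffer = ByteBuffer.allocate(8 * 1024);   // assumes one sample always fits in the buffer
        while (channel.read(buffer) != -1) {
            buffer.flip();                                    // drain what has arrived so far
            int sampleStart = buffer.position();
            while (buffer.hasRemaining()) {
                if (buffer.get() == '\n') {                   // end of one complete sample
                    int sampleEnd = buffer.position();
                    byte[] bytes = new byte[sampleEnd - sampleStart];
                    buffer.position(sampleStart);
                    buffer.get(bytes);                        // consumes up to and including the '\n'
                    handleSample.accept(new String(bytes, StandardCharsets.US_ASCII));
                    sampleStart = sampleEnd;
                }
            }
            buffer.position(sampleStart);                     // unconsumed tail = partial sample
            buffer.compact();                                 // move the partial sample to the front for the next read
        }
    }
}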
From source file:edu.hawaii.soest.kilonalu.adcp.ADCPSource.java
/** * A method that executes the streaming of data from the source to the RBNB * server after all configuration of settings, connections to hosts, and * thread initiatizing occurs. This method contains the detailed code for * streaming the data and interpreting the stream. *///from w w w . j a va 2s . co m protected boolean execute() { // do not execute the stream if there is no connection if (!isConnected()) return false; boolean failed = false; SocketChannel socket = getSocketConnection(); // while data are being sent, read them into the buffer try { // create four byte placeholders used to evaluate up to a four-byte // window. The FIFO layout looks like: // ------------------------- // in ---> | One | Two |Three|Four | ---> out // ------------------------- byte byteOne = 0x00, // set initial placeholder values byteTwo = 0x00, byteThree = 0x00, byteFour = 0x00; // Create a buffer that will store the ensemble bytes as they are read ByteBuffer ensembleBuffer = ByteBuffer.allocate(getBufferSize()); // create a byte buffer to store bytes from the TCP stream ByteBuffer buffer = ByteBuffer.allocateDirect(getBufferSize()); // add a channel of data that will be pushed to the server. // Each ensemble will be sent to the Data Turbine as an rbnb frame. ChannelMap rbnbChannelMap = new ChannelMap(); int channelIndex = rbnbChannelMap.Add(getRBNBChannelName()); // while there are bytes to read from the socket ... while (socket.read(buffer) != -1 || buffer.position() > 0) { // prepare the buffer for reading buffer.flip(); // while there are unread bytes in the ByteBuffer while (buffer.hasRemaining()) { byteOne = buffer.get(); // Use a State Machine to process the byte stream. // Start building an rbnb frame for the entire ensemble, first by // inserting a timestamp into the channelMap. This time is merely // the time of insert into the data turbine, not the time of // observations of the measurements. 
That time should be parsed out // of the ensemble in the Sink client code System.out.print("\rProcessed byte # " + ensembleByteCount + " " + new String(Hex.encodeHex((new byte[] { byteOne }))) + " - log msg is: "); switch (state) { case 0: // find ensemble header id if (byteOne == 0x7F && byteTwo == 0x7F) { ensembleByteCount++; // add Header ID ensembleChecksum += (byteTwo & 0xFF); ensembleByteCount++; // add Data Source ID ensembleChecksum += (byteOne & 0xFF); state = 1; break; } else { break; } case 1: // find the Ensemble Length (LSB) ensembleByteCount++; // add Ensemble Byte Count (LSB) ensembleChecksum += (byteOne & 0xFF); state = 2; break; case 2: // find the Ensemble Length (MSB) ensembleByteCount++; // add Ensemble Byte Count (MSB) ensembleChecksum += (byteOne & 0xFF); int upperEnsembleByte = (byteOne & 0xFF) << 8; int lowerEnsembleByte = (byteTwo & 0xFF); ensembleBytes = upperEnsembleByte + lowerEnsembleByte; logger.debug("Number of Bytes in the Ensemble: " + ensembleBytes); if (ensembleBuffer.remaining() > 0) { ensembleBuffer.put(byteFour); ensembleBuffer.put(byteThree); ensembleBuffer.put(byteTwo); ensembleBuffer.put(byteOne); } else { ensembleBuffer.compact(); ensembleBuffer.put(byteFour); ensembleBuffer.put(byteThree); ensembleBuffer.put(byteTwo); ensembleBuffer.put(byteOne); } state = 3; break; // verify that the header is real, not a random 0x7F7F case 3: // find the number of data types in the ensemble // set the numberOfDataTypes byte if (ensembleByteCount == NUMBER_OF_DATA_TYPES_OFFSET - 1) { ensembleByteCount++; ensembleChecksum += (byteOne & 0xFF); numberOfDataTypes = (byteOne & 0xFF); // calculate the number of bytes to the Fixed Leader ID dataTypeOneOffset = 6 + (2 * numberOfDataTypes); if (ensembleBuffer.remaining() > 0) { ensembleBuffer.put(byteOne); } else { ensembleBuffer.compact(); ensembleBuffer.put(byteOne); } state = 4; break; } else { ensembleByteCount++; ensembleChecksum += (byteOne & 0xFF); if (ensembleBuffer.remaining() > 0) { ensembleBuffer.put(byteOne); } else { ensembleBuffer.compact(); ensembleBuffer.put(byteOne); } break; } case 4: // find the offset to data type #1 and verify the header ID if ((ensembleByteCount == dataTypeOneOffset + 1) && byteOne == 0x00 && byteTwo == 0x00) { ensembleByteCount++; ensembleChecksum += (byteOne & 0xFF); // we are confident that the previous sequence of 0x7F7F is truly // an headerID and not a random occurrence in the stream because // we have identified the Fixed Leader ID (0x0000) the correct // number of bytes beyond the 0x7F7F headerIsVerified = true; if (ensembleBuffer.remaining() > 0) { ensembleBuffer.put(byteOne); } else { ensembleBuffer.compact(); ensembleBuffer.put(byteOne); } state = 5; break; } else { if (ensembleByteCount > dataTypeOneOffset + 1) { // We've hit a random 0x7F7F byte sequence that is not a true // ensemble header id. Reset the processing and look for the // next 0x7F7F sequence in the stream ensembleByteCount = 0; ensembleChecksum = 0; dataTypeOneOffset = 0; numberOfDataTypes = 0; headerIsVerified = false; ensembleBuffer.clear(); rbnbChannelMap.Clear(); channelIndex = rbnbChannelMap.Add(getRBNBChannelName()); byteOne = 0x00; byteTwo = 0x00; byteThree = 0x00; byteFour = 0x00; state = 0; if (ensembleBuffer.remaining() > 0) { ensembleBuffer.put(byteOne); } else { ensembleBuffer.compact(); ensembleBuffer.put(byteOne); } break; } else { // We are still parsing bytes between the purported header ID // and fixed leader ID. 
Keep parsing until we hit the fixed // leader ID, or until we are greater than the dataTypeOneOffset // stated value. ensembleByteCount++; ensembleChecksum += (byteOne & 0xFF); if (ensembleBuffer.remaining() > 0) { ensembleBuffer.put(byteOne); } else { ensembleBuffer.compact(); ensembleBuffer.put(byteOne); } break; } } case 5: // read the rest of the bytes to the next Header ID // if we've made it to the next ensemble's header id, prepare to // flush the data. Also check that the calculated byte count // is greater than the recorded byte count in case of finding an // arbitrary 0x7f 0x7f sequence in the data stream if (byteOne == 0x7F && byteTwo == 0x7F && (ensembleByteCount == ensembleBytes + 3) && headerIsVerified) { // remove the last bytes from the count (byteOne and byteTwo) ensembleByteCount -= 1; // remove the last three bytes from the checksum: // the two checksum bytes are not included, and the two 0x7f //bytes belong to the next ensemble, and one of them was // previously added. Reset the buffer position due to this too. //ensembleChecksum -= (byteOne & 0xFF); ensembleChecksum -= (byteTwo & 0xFF); ensembleChecksum -= (byteThree & 0xFF); ensembleChecksum -= (byteFour & 0xFF); // We are consistently 1 byte over in the checksum. Trim it. We need to // troubleshoot why this is. CSJ 12/18/2007 ensembleChecksum = ensembleChecksum - 1; // jockey byteThree into LSB, byteFour into MSB int upperChecksumByte = (byteThree & 0xFF) << 8; int lowerChecksumByte = (byteFour & 0xFF); int trueChecksum = upperChecksumByte + lowerChecksumByte; if (ensembleBuffer.remaining() > 0) { ensembleBuffer.put((byte) lowerChecksumByte); ensembleBuffer.put((byte) (upperChecksumByte >> 8)); } else { ensembleBuffer.compact(); ensembleBuffer.put((byte) lowerChecksumByte); ensembleBuffer.put((byte) (upperChecksumByte >> 8)); } // check if the calculated checksum (modulo 65535) is equal // to the true checksum; if so, flush to the data turbine // Also, if the checksums are off by 1 byte, also flush the // data. We need to troubleshoot this bug CSJ 06/11/2008 if (((ensembleChecksum % 65535) == trueChecksum) || ((ensembleChecksum + 1) % 65535 == trueChecksum) || ((ensembleChecksum - 1) % 65535 == trueChecksum)) { // extract just the length of the ensemble bytes out of the // ensemble buffer, and place it in the channel map as a // byte array. Then, send it to the data turbine. byte[] ensembleArray = new byte[ensembleByteCount]; ensembleBuffer.flip(); ensembleBuffer.get(ensembleArray); // send the ensemble to the data turbine rbnbChannelMap.PutTimeAuto("server"); rbnbChannelMap.PutDataAsByteArray(channelIndex, ensembleArray); getSource().Flush(rbnbChannelMap); logger.debug("flushed: " + ensembleByteCount + " " + "ens cksum: " + ensembleChecksum + "\t\t" + "ens pos: " + ensembleBuffer.position() + "\t" + "ens rem: " + ensembleBuffer.remaining() + "\t" + "buf pos: " + buffer.position() + "\t" + "buf rem: " + buffer.remaining() + "\t" + "state: " + state); logger.info("Sent ADCP ensemble to the data turbine."); // only clear all four bytes if we are not one or two bytes // from the end of the byte buffer (i.e. 
the header id // is split or is all in the previous buffer) if (byteOne == 0x7f && byteTwo == 0x7f && ensembleByteCount > ensembleBytes && buffer.position() == 0) { byteThree = 0x00; byteFour = 0x00; logger.debug("Cleared ONLY b3, b4."); } else if (byteOne == 0x7f && ensembleByteCount > ensembleBytes && buffer.position() == 1) { buffer.position(buffer.position() - 1); byteTwo = 0x00; byteThree = 0x00; byteFour = 0x00; logger.debug("Cleared ONLY b2, b3, b4."); } else { byteOne = 0x00; byteTwo = 0x00; byteThree = 0x00; byteFour = 0x00; logger.debug("Cleared ALL b1, b2, b3, b4."); } //rewind the position to before the next ensemble's header id if (buffer.position() >= 2) { buffer.position(buffer.position() - 2); logger.debug("Moved position back two, now: " + buffer.position()); } ensembleBuffer.clear(); ensembleByteCount = 0; ensembleBytes = 0; ensembleChecksum = 0; state = 0; break; } else { // The checksums don't match, move on logger.info("not equal: " + "calc chksum: " + (ensembleChecksum % 65535) + "\tens chksum: " + trueChecksum + "\tbuf pos: " + buffer.position() + "\tbuf rem: " + buffer.remaining() + "\tens pos: " + ensembleBuffer.position() + "\tens rem: " + ensembleBuffer.remaining() + "\tstate: " + state); rbnbChannelMap.Clear(); channelIndex = rbnbChannelMap.Add(getRBNBChannelName()); ensembleBuffer.clear(); ensembleByteCount = 0; ensembleChecksum = 0; ensembleBuffer.clear(); state = 0; break; } } else { // still in the middle of the ensemble, keep adding bytes ensembleByteCount++; // add each byte found ensembleChecksum += (byteOne & 0xFF); if (ensembleBuffer.remaining() > 0) { ensembleBuffer.put(byteOne); } else { ensembleBuffer.compact(); ensembleBuffer.put(byteOne); } break; } } // shift the bytes in the FIFO window byteFour = byteThree; byteThree = byteTwo; byteTwo = byteOne; logger.debug("remaining:\t" + buffer.remaining() + "\tstate:\t" + state + "\tens byte count:\t" + ensembleByteCount + "\tens bytes:\t" + ensembleBytes + "\tver:\t" + headerIsVerified + "\tbyte value:\t" + new String(Hex.encodeHex((new byte[] { byteOne })))); } //end while (more unread bytes) // prepare the buffer to read in more bytes from the stream buffer.compact(); } // end while (more socket bytes to read) socket.close(); } catch (IOException e) { // handle exceptions // In the event of an i/o exception, log the exception, and allow execute() // to return false, which will prompt a retry. failed = true; e.printStackTrace(); return !failed; } catch (SAPIException sapie) { // In the event of an RBNB communication exception, log the exception, // and allow execute() to return false, which will prompt a retry. failed = true; sapie.printStackTrace(); return !failed; } return !failed; }
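Unlike the ASCII drivers, this ADCP source assembles a binary frame: a 0x7F7F header, a little-endian two-byte length, the payload, and a checksum, with compact() preserving a partially received ensemble between socket reads. Instrument-specific checksum handling aside, the underlying "length-prefixed frame" idea can be sketched much more compactly; the frame layout below is generic and is not the exact RDI ensemble format:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.ReadableByteChannel;
import java.util.function.Consumer;

public class FrameReader {

    // Reads frames of the form [u16 little-endian length][payload] and hands each payload to the callback.
    static void readFrames(ReadableByteChannel channel, Consumer<byte[]> handleFrame) throws IOException {
        ByteBuffer buffer = ByteBuffer.allocate(8 * 1024).order(ByteOrder.LITTLE_ENDIAN);
        while (channel.read(buffer) != -1) {
            buffer.flip();                                   // drain mode
            while (buffer.remaining() >= 2) {
                buffer.mark();                               // remember the start of the length field
                int length = buffer.getShort() & 0xFFFF;     // unsigned 16-bit payload length
                if (buffer.remaining() < length) {
                    buffer.reset();                          // frame not complete yet; rewind to the length field
                    break;
                }
                byte[] payload = new byte[length];
                buffer.get(payload);
                handleFrame.accept(payload);
            }
            buffer.compact();                                // keep the partial frame for the next read
        }
    }
}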
From source file:edu.hawaii.soest.pacioos.text.SocketTextSource.java
@Override protected boolean execute() { log.debug("SocketTextSource.execute() called."); // do not execute the stream if there is no connection if (!isConnected()) return false; boolean failed = false; /* Get a connection to the instrument */ SocketChannel socket = getSocketConnection(); if (socket == null) return false; // while data are being sent, read them into the buffer try {/*www. j ava 2s. co m*/ // create four byte placeholders used to evaluate up to a four-byte // window. The FIFO layout looks like: // ------------------------- // in ---> | One | Two |Three|Four | ---> out // ------------------------- byte byteOne = 0x00, // set initial placeholder values byteTwo = 0x00, byteThree = 0x00, byteFour = 0x00; // Create a buffer that will store the sample bytes as they are read ByteBuffer sampleBuffer = ByteBuffer.allocate(getBufferSize()); // create a byte buffer to store bytes from the TCP stream ByteBuffer buffer = ByteBuffer.allocateDirect(getBufferSize()); // while there are bytes to read from the socket ... while (socket.read(buffer) != -1 || buffer.position() > 0) { // prepare the buffer for reading buffer.flip(); // while there are unread bytes in the ByteBuffer while (buffer.hasRemaining()) { byteOne = buffer.get(); // log the byte stream String character = new String(new byte[] { byteOne }); if (log.isDebugEnabled()) { List<Byte> whitespaceBytes = new ArrayList<Byte>(); whitespaceBytes.add(new Byte((byte) 0x0A)); whitespaceBytes.add(new Byte((byte) 0x0D)); if (whitespaceBytes.contains(new Byte(byteOne))) { character = new String(Hex.encodeHex((new byte[] { byteOne }))); } } log.debug("char: " + character + "\t" + "b1: " + new String(Hex.encodeHex((new byte[] { byteOne }))) + "\t" + "b2: " + new String(Hex.encodeHex((new byte[] { byteTwo }))) + "\t" + "b3: " + new String(Hex.encodeHex((new byte[] { byteThree }))) + "\t" + "b4: " + new String(Hex.encodeHex((new byte[] { byteFour }))) + "\t" + "sample pos: " + sampleBuffer.position() + "\t" + "sample rem: " + sampleBuffer.remaining() + "\t" + "sample cnt: " + sampleByteCount + "\t" + "buffer pos: " + buffer.position() + "\t" + "buffer rem: " + buffer.remaining() + "\t" + "state: " + state); // evaluate each byte to find the record delimiter(s), and when found, validate and // send the sample to the DataTurbine. int numberOfChannelsFlushed = 0; if (getRecordDelimiters().length == 2) { // have we hit the delimiters in the stream yet? if (byteTwo == getFirstDelimiterByte() && byteOne == getSecondDelimiterByte()) { sampleBuffer.put(byteOne); sampleByteCount++; // extract just the length of the sample bytes out of the // sample buffer, and place it in the channel map as a // byte array. Then, send it to the DataTurbine. log.debug("Sample byte count: " + sampleByteCount); byte[] sampleArray = new byte[sampleByteCount]; sampleBuffer.flip(); sampleBuffer.get(sampleArray); String sampleString = new String(sampleArray, "US-ASCII"); if (validateSample(sampleString)) { numberOfChannelsFlushed = sendSample(sampleString); } sampleBuffer.clear(); sampleByteCount = 0; byteOne = 0x00; byteTwo = 0x00; byteThree = 0x00; byteFour = 0x00; log.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. 
Cleared rbnbChannelMap."); } else { // still in the middle of the sample, keep adding bytes sampleByteCount++; // add each byte found if (sampleBuffer.remaining() > 0) { sampleBuffer.put(byteOne); } else { sampleBuffer.compact(); log.debug("Compacting sampleBuffer ..."); sampleBuffer.put(byteOne); } } } else if (getRecordDelimiters().length == 1) { // have we hit the delimiter in the stream yet? if (byteOne == getFirstDelimiterByte()) { sampleBuffer.put(byteOne); sampleByteCount++; // extract just the length of the sample bytes out of the // sample buffer, and place it in the channel map as a // byte array. Then, send it to the DataTurbine. byte[] sampleArray = new byte[sampleByteCount]; sampleBuffer.flip(); sampleBuffer.get(sampleArray); String sampleString = new String(sampleArray, "US-ASCII"); if (validateSample(sampleString)) { numberOfChannelsFlushed = sendSample(sampleString); } sampleBuffer.clear(); sampleByteCount = 0; byteOne = 0x00; byteTwo = 0x00; byteThree = 0x00; byteFour = 0x00; log.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. Cleared rbnbChannelMap."); } else { // still in the middle of the sample, keep adding bytes sampleByteCount++; // add each byte found if (sampleBuffer.remaining() > 0) { sampleBuffer.put(byteOne); } else { sampleBuffer.compact(); log.debug("Compacting sampleBuffer ..."); sampleBuffer.put(byteOne); } } } // end getRecordDelimiters().length // shift the bytes in the FIFO window byteFour = byteThree; byteThree = byteTwo; byteTwo = byteOne; } //end while (more unread bytes) // prepare the buffer to read in more bytes from the stream buffer.compact(); } // end while (more socket bytes to read) socket.close(); } catch (IOException e) { // handle exceptions // In the event of an i/o exception, log the exception, and allow execute() // to return false, which will prompt a retry. failed = true; log.error("There was a communication error in sending the data sample. The message was: " + e.getMessage()); if (log.isDebugEnabled()) { e.printStackTrace(); } return !failed; } catch (SAPIException sapie) { // In the event of an RBNB communication exception, log the exception, // and allow execute() to return false, which will prompt a retry. failed = true; log.error("There was an RBNB error while sending the data sample. The message was: " + sapie.getMessage()); if (log.isDebugEnabled()) { sapie.printStackTrace(); } return !failed; } return !failed; }
From source file:edu.hawaii.soest.kilonalu.flntu.FLNTUSource.java
/** * A method that executes the streaming of data from the source to the RBNB * server after all configuration of settings, connections to hosts, and * thread initiatizing occurs. This method contains the detailed code for * streaming the data and interpreting the stream. *///w w w .ja v a 2s . co m protected boolean execute() { logger.debug("FLNTUSource.execute() called."); // do not execute the stream if there is no connection if (!isConnected()) return false; boolean failed = false; SocketChannel socket = getSocketConnection(); // while data are being sent, read them into the buffer try { // create four byte placeholders used to evaluate up to a four-byte // window. The FIFO layout looks like: // ------------------------- // in ---> | One | Two |Three|Four | ---> out // ------------------------- byte byteOne = 0x00, // set initial placeholder values byteTwo = 0x00, byteThree = 0x00, byteFour = 0x00; // Create a buffer that will store the sample bytes as they are read ByteBuffer sampleBuffer = ByteBuffer.allocate(getBufferSize()); // create a byte buffer to store bytes from the TCP stream ByteBuffer buffer = ByteBuffer.allocateDirect(getBufferSize()); // add a channel of data that will be pushed to the server. // Each sample will be sent to the Data Turbine as an rbnb frame. ChannelMap rbnbChannelMap = new ChannelMap(); // while there are bytes to read from the socket ... while (socket.read(buffer) != -1 || buffer.position() > 0) { // prepare the buffer for reading buffer.flip(); // while there are unread bytes in the ByteBuffer while (buffer.hasRemaining()) { byteOne = buffer.get(); logger.debug("char: " + (char) byteOne + "\t" + "b1: " + new String(Hex.encodeHex((new byte[] { byteOne }))) + "\t" + "b2: " + new String(Hex.encodeHex((new byte[] { byteTwo }))) + "\t" + "b3: " + new String(Hex.encodeHex((new byte[] { byteThree }))) + "\t" + "b4: " + new String(Hex.encodeHex((new byte[] { byteFour }))) + "\t" + "sample pos: " + sampleBuffer.position() + "\t" + "sample rem: " + sampleBuffer.remaining() + "\t" + "sample cnt: " + sampleByteCount + "\t" + "buffer pos: " + buffer.position() + "\t" + "buffer rem: " + buffer.remaining() + "\t" + "state: " + state); // Use a State Machine to process the byte stream. switch (state) { case 0: // sample sets begin with 'mvs 1\r\n' and end with 'mvs 0\r\n'. Find the // beginning of the sample set using the 4-byte window (s 1\r\n) // note bytes are in reverse order in the FIFO window if (byteOne == 0x0A && byteTwo == 0x0D && byteThree == 0x31 && byteFour == 0x20) { // we've found the beginning of a sample set, move on state = 1; break; } else { break; } case 1: // read the rest of the bytes to the next EOL characters // sample line is terminated by record delimiter byte (\r\n) // note bytes are in reverse order in the FIFO window if (byteOne == 0x0A && byteTwo == 0x0D && byteThree == 0x30 && byteFour == 0x20) { // we've found the sample set ending, clear buffers and return // to state 0 to wait for the next set byteOne = 0x00; byteTwo = 0x00; byteThree = 0x00; byteFour = 0x00; sampleBuffer.clear(); sampleByteCount = 0; rbnbChannelMap.Clear(); logger.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. 
Cleared rbnbChannelMap."); state = 0; // if we're not at the sample set end, look for individual samples } else if (byteOne == 0x0A && byteTwo == 0x0D) { // found the sample ending delimiter // add in the sample delimiter to the sample buffer if (sampleBuffer.remaining() > 0) { sampleBuffer.put(byteOne); sampleByteCount++; } else { sampleBuffer.compact(); logger.debug("Compacting sampleBuffer ..."); sampleBuffer.put(byteOne); sampleByteCount++; } // extract just the length of the sample bytes out of the // sample buffer, and place it in the channel map as a // byte array. Then, send it to the data turbine. byte[] sampleArray = new byte[sampleByteCount]; sampleBuffer.flip(); sampleBuffer.get(sampleArray); // send the sample to the data turbine rbnbChannelMap.PutTimeAuto("server"); String sampleString = new String(sampleArray, "US-ASCII"); int channelIndex = rbnbChannelMap.Add(getRBNBChannelName()); rbnbChannelMap.PutMime(channelIndex, "text/plain"); rbnbChannelMap.PutDataAsString(channelIndex, sampleString); getSource().Flush(rbnbChannelMap); logger.info("Sample: " + sampleString.substring(0, sampleString.length() - 2) + " sent data to the DataTurbine. "); byteOne = 0x00; byteTwo = 0x00; byteThree = 0x00; byteFour = 0x00; sampleBuffer.clear(); sampleByteCount = 0; rbnbChannelMap.Clear(); logger.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. Cleared rbnbChannelMap."); break; } else { // not 0x0 // still in the middle of the sample, keep adding bytes sampleByteCount++; // add each byte found if (sampleBuffer.remaining() > 0) { sampleBuffer.put(byteOne); } else { sampleBuffer.compact(); logger.debug("Compacting sampleBuffer ..."); sampleBuffer.put(byteOne); } break; } // end if for 0x0D20 EOL } // end switch statement // shift the bytes in the FIFO window byteFour = byteThree; byteThree = byteTwo; byteTwo = byteOne; } //end while (more unread bytes) // prepare the buffer to read in more bytes from the stream buffer.compact(); } // end while (more socket bytes to read) socket.close(); } catch (IOException e) { // handle exceptions // In the event of an i/o exception, log the exception, and allow execute() // to return false, which will prompt a retry. failed = true; e.printStackTrace(); return !failed; } catch (SAPIException sapie) { // In the event of an RBNB communication exception, log the exception, // and allow execute() to return false, which will prompt a retry. failed = true; sapie.printStackTrace(); return !failed; } return !failed; }
From source file:hivemall.recommend.SlimUDTF.java
private void runIterativeTraining() throws HiveException { final ByteBuffer buf = this._inputBuf; final NioStatefulSegment dst = this._fileIO; assert (buf != null); assert (dst != null); final Reporter reporter = getReporter(); final Counters.Counter iterCounter = (reporter == null) ? null : reporter.getCounter("hivemall.recommend.slim$Counter", "iteration"); try {//from ww w . j a va 2 s . co m if (dst.getPosition() == 0L) {// run iterations w/o temporary file if (buf.position() == 0) { return; // no training example } buf.flip(); for (int iter = 2; iter < numIterations; iter++) { _cvState.next(); reportProgress(reporter); setCounterValue(iterCounter, iter); while (buf.remaining() > 0) { int recordBytes = buf.getInt(); assert (recordBytes > 0) : recordBytes; replayTrain(buf); } buf.rewind(); if (_cvState.isConverged(_observedTrainingExamples)) { break; } } logger.info("Performed " + _cvState.getCurrentIteration() + " iterations of " + NumberUtils.formatNumber(_observedTrainingExamples) + " training examples on memory (thus " + NumberUtils.formatNumber(_observedTrainingExamples * _cvState.getCurrentIteration()) + " training updates in total) "); } else { // read training examples in the temporary file and invoke train for each example // write KNNi in buffer to a temporary file if (buf.remaining() > 0) { writeBuffer(buf, dst); } try { dst.flush(); } catch (IOException e) { throw new HiveException("Failed to flush a file: " + dst.getFile().getAbsolutePath(), e); } if (logger.isInfoEnabled()) { File tmpFile = dst.getFile(); logger.info("Wrote KNN entries of axis items to a temporary file for iterative training: " + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")"); } // run iterations for (int iter = 2; iter < numIterations; iter++) { _cvState.next(); setCounterValue(iterCounter, iter); buf.clear(); dst.resetPosition(); while (true) { reportProgress(reporter); // load a KNNi to a buffer in the temporary file final int bytesRead; try { bytesRead = dst.read(buf); } catch (IOException e) { throw new HiveException("Failed to read a file: " + dst.getFile().getAbsolutePath(), e); } if (bytesRead == 0) { // reached file EOF break; } assert (bytesRead > 0) : bytesRead; // reads training examples from a buffer buf.flip(); int remain = buf.remaining(); if (remain < SizeOf.INT) { throw new HiveException("Illegal file format was detected"); } while (remain >= SizeOf.INT) { int pos = buf.position(); int recordBytes = buf.getInt(); remain -= SizeOf.INT; if (remain < recordBytes) { buf.position(pos); break; } replayTrain(buf); remain -= recordBytes; } buf.compact(); } if (_cvState.isConverged(_observedTrainingExamples)) { break; } } logger.info("Performed " + _cvState.getCurrentIteration() + " iterations of " + NumberUtils.formatNumber(_observedTrainingExamples) + " training examples on memory and KNNi data on secondary storage (thus " + NumberUtils.formatNumber(_observedTrainingExamples * _cvState.getCurrentIteration()) + " training updates in total) "); } } catch (Throwable e) { throw new HiveException("Exception caused in the iterative training", e); } finally { // delete the temporary file and release resources try { dst.close(true); } catch (IOException e) { throw new HiveException("Failed to close a file: " + dst.getFile().getAbsolutePath(), e); } this._inputBuf = null; this._fileIO = null; } }
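During replay, each record in the temporary file is an int byte-count followed by its payload; when the buffer ends in the middle of a record, the position is rolled back to the length prefix and the buffer is compacted so the truncated tail is re-read together with the next chunk from the file. A self-contained sketch of that replay loop under the same length-prefixed layout (the file handling and the skipped "parse the payload" step are illustrative):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

public class RecordReplay {

    // Replays [int length][payload] records from a temporary file, e.g. once per training iteration.
    static void replay(Path tempFile, ByteBuffer buf) throws IOException {
        try (FileChannel channel = FileChannel.open(tempFile, StandardOpenOption.READ)) {
            buf.clear();
            while (true) {
                int bytesRead = channel.read(buf);
                if (bytesRead <= 0 && buf.position() == 0) {
                    break;                                   // reached EOF with nothing buffered
                }
                buf.flip();
                while (buf.remaining() >= Integer.BYTES) {
                    int pos = buf.position();
                    int recordBytes = buf.getInt();          // length prefix
                    if (buf.remaining() < recordBytes) {
                        buf.position(pos);                   // roll back: the record is incomplete
                        break;
                    }
                    // a real implementation would parse and train on the payload here
                    buf.position(buf.position() + recordBytes);
                }
                buf.compact();                               // carry the partial record into the next read
                if (bytesRead <= 0) {
                    break;                                   // EOF: a dangling partial record would mean a truncated file
                }
            }
        }
    }
}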
From source file:hivemall.mf.OnlineMatrixFactorizationUDTF.java
protected final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException { final ByteBuffer inputBuf = this.inputBuf; final NioFixedSegment fileIO = this.fileIO; assert (inputBuf != null); assert (fileIO != null); final long numTrainingExamples = count; final Reporter reporter = getReporter(); final Counter iterCounter = (reporter == null) ? null : reporter.getCounter("hivemall.mf.MatrixFactorization$Counter", "iteration"); try {//from www.ja va 2 s .co m if (lastWritePos == 0) {// run iterations w/o temporary file if (inputBuf.position() == 0) { return; // no training example } inputBuf.flip(); int iter = 2; for (; iter <= iterations; iter++) { reportProgress(reporter); setCounterValue(iterCounter, iter); while (inputBuf.remaining() > 0) { int user = inputBuf.getInt(); int item = inputBuf.getInt(); double rating = inputBuf.getDouble(); // invoke train count++; train(user, item, rating); } cvState.multiplyLoss(0.5d); if (cvState.isConverged(iter, numTrainingExamples)) { break; } inputBuf.rewind(); } logger.info("Performed " + Math.min(iter, iterations) + " iterations of " + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus " + NumberUtils.formatNumber(count) + " training updates in total) "); } else {// read training examples in the temporary file and invoke train for each example // write training examples in buffer to a temporary file if (inputBuf.position() > 0) { writeBuffer(inputBuf, fileIO, lastWritePos); } else if (lastWritePos == 0) { return; // no training example } try { fileIO.flush(); } catch (IOException e) { throw new HiveException("Failed to flush a file: " + fileIO.getFile().getAbsolutePath(), e); } if (logger.isInfoEnabled()) { File tmpFile = fileIO.getFile(); logger.info( "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: " + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")"); } // run iterations int iter = 2; for (; iter <= iterations; iter++) { setCounterValue(iterCounter, iter); inputBuf.clear(); long seekPos = 0L; while (true) { reportProgress(reporter); // TODO prefetch // writes training examples to a buffer in the temporary file final int bytesRead; try { bytesRead = fileIO.read(seekPos, inputBuf); } catch (IOException e) { throw new HiveException("Failed to read a file: " + fileIO.getFile().getAbsolutePath(), e); } if (bytesRead == 0) { // reached file EOF break; } assert (bytesRead > 0) : bytesRead; seekPos += bytesRead; // reads training examples from a buffer inputBuf.flip(); int remain = inputBuf.remaining(); assert (remain > 0) : remain; for (; remain >= RECORD_BYTES; remain -= RECORD_BYTES) { int user = inputBuf.getInt(); int item = inputBuf.getInt(); double rating = inputBuf.getDouble(); // invoke train count++; train(user, item, rating); } inputBuf.compact(); } cvState.multiplyLoss(0.5d); if (cvState.isConverged(iter, numTrainingExamples)) { break; } } logger.info("Performed " + Math.min(iter, iterations) + " iterations of " + NumberUtils.formatNumber(numTrainingExamples) + " training examples using a secondary storage (thus " + NumberUtils.formatNumber(count) + " training updates in total)"); } } finally { // delete the temporary file and release resources try { fileIO.close(true); } catch (IOException e) { throw new HiveException("Failed to close a file: " + fileIO.getFile().getAbsolutePath(), e); } this.inputBuf = null; this.fileIO = null; } }
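This variant replays fixed-size records (int user, int item, double rating) instead of length-prefixed ones, so the only bookkeeping needed is the record size; compact() carries a record that straddles a read boundary into the next pass. A minimal sketch under those assumptions (the constants and file handling are illustrative):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

public class RatingReplay {

    // Each record is: int user, int item, double rating.
    private static final int RECORD_BYTES = Integer.BYTES + Integer.BYTES + Double.BYTES;

    // Replays fixed-size (user, item, rating) records from a temporary file.
    static void replay(Path tempFile) throws IOException {
        try (FileChannel channel = FileChannel.open(tempFile, StandardOpenOption.READ)) {
            ByteBuffer buf = ByteBuffer.allocate(64 * 1024);
            while (channel.read(buf) > 0) {
                buf.flip();
                // consume as many whole records as the buffer currently holds
                while (buf.remaining() >= RECORD_BYTES) {
                    int user = buf.getInt();
                    int item = buf.getInt();
                    double rating = buf.getDouble();
                    // train(user, item, rating) would be invoked here
                }
                buf.compact();   // a record split across reads is completed on the next pass
            }
        }
    }
}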