List of usage examples for java.nio ByteBuffer allocateDirect
public static ByteBuffer allocateDirect(int capacity)
From source file:hivemall.fm.FactorizationMachineUDTF.java
protected void recordTrain(@Nonnull final Feature[] x, final double y) throws HiveException { if (_iterations <= 1) { return;/*from w w w. ja v a2 s. co m*/ } ByteBuffer inputBuf = _inputBuf; NioStatefullSegment dst = _fileIO; if (inputBuf == null) { final File file; try { file = File.createTempFile("hivemall_fm", ".sgmt"); file.deleteOnExit(); if (!file.canWrite()) { throw new UDFArgumentException("Cannot write a temporary file: " + file.getAbsolutePath()); } LOG.info("Record training examples to a file: " + file.getAbsolutePath()); } catch (IOException ioe) { throw new UDFArgumentException(ioe); } catch (Throwable e) { throw new UDFArgumentException(e); } this._inputBuf = inputBuf = ByteBuffer.allocateDirect(1024 * 1024); // 1 MiB this._fileIO = dst = new NioStatefullSegment(file, false); } int xBytes = Feature.requiredBytes(x); int recordBytes = (Integer.SIZE + Double.SIZE) / 8 + xBytes; int requiredBytes = (Integer.SIZE / 8) + recordBytes; int remain = inputBuf.remaining(); if (remain < requiredBytes) { writeBuffer(inputBuf, dst); } inputBuf.putInt(recordBytes); inputBuf.putInt(x.length); for (Feature f : x) { f.writeTo(inputBuf); } inputBuf.putDouble(y); }
From source file:hivemall.recommend.SlimUDTF.java
private void recordTrainingInput(final int itemI, @Nonnull final Int2ObjectMap<Int2FloatMap> knnItems, final int numKNNItems) throws HiveException { ByteBuffer buf = this._inputBuf; NioStatefulSegment dst = this._fileIO; if (buf == null) { // invoke only at task node (initialize is also invoked in compilation) final File file; try {//from w w w . ja va2 s. c om file = File.createTempFile("hivemall_slim", ".sgmt"); // to save KNN data file.deleteOnExit(); if (!file.canWrite()) { throw new UDFArgumentException("Cannot write a temporary file: " + file.getAbsolutePath()); } } catch (IOException ioe) { throw new UDFArgumentException(ioe); } this._inputBuf = buf = ByteBuffer.allocateDirect(8 * 1024 * 1024); // 8MB this._fileIO = dst = new NioStatefulSegment(file, false); } int recordBytes = SizeOf.INT + SizeOf.INT + SizeOf.INT * 2 * knnItems.size() + (SizeOf.INT + SizeOf.FLOAT) * numKNNItems; int requiredBytes = SizeOf.INT + recordBytes; // need to allocate space for "recordBytes" itself int remain = buf.remaining(); if (remain < requiredBytes) { writeBuffer(buf, dst); } buf.putInt(recordBytes); buf.putInt(itemI); buf.putInt(knnItems.size()); for (Int2ObjectMap.Entry<Int2FloatMap> e1 : Fastutil.fastIterable(knnItems)) { int user = e1.getIntKey(); buf.putInt(user); Int2FloatMap ru = e1.getValue(); buf.putInt(ru.size()); for (Int2FloatMap.Entry e2 : Fastutil.fastIterable(ru)) { buf.putInt(e2.getIntKey()); buf.putFloat(e2.getFloatValue()); } } }
From source file:edu.hawaii.soest.kilonalu.dvp2.DavisWxSource.java
/**
 * A method that executes the streaming of data from the source to the RBNB
 * server after all configuration of settings, connections to hosts, and
 * thread initializing occurs. This method contains the detailed code for
 * streaming the data and interpreting the stream.
 *
 * @return true when streaming ends normally; false on an I/O, RBNB (SAPI),
 *         or interruption error, which prompts the caller to retry
 */
protected boolean execute() {
    logger.debug("DavisWxSource.execute() called.");
    // do not execute the stream if there is no connection
    if (!isConnected())
        return false;
    boolean failed = false;
    // while data are being sent, read them into the buffer
    try {
        this.socketChannel = getSocketConnection();
        // create four byte placeholders used to evaluate up to a four-byte
        // window. The FIFO layout looks like:
        // -------------------------
        // in ---> | One | Two |Three|Four | ---> out
        // -------------------------
        byte byteOne = 0x00, // set initial placeholder values
                byteTwo = 0x00, byteThree = 0x00, byteFour = 0x00;
        // Create a buffer that will store the sample bytes as they are read
        ByteBuffer sampleBuffer = ByteBuffer.allocate(getBufferSize());
        // create a byte buffer to store bytes from the TCP stream
        ByteBuffer buffer = ByteBuffer.allocateDirect(getBufferSize());
        // add a channel of data that will be pushed to the server.
        // Each sample will be sent to the Data Turbine as an rbnb frame.
        ChannelMap rbnbChannelMap = new ChannelMap();
        int channelIndex = 0;
        // add the raw binary LOOP packet data
        //channelIndex = rbnbChannelMap.Add(getRBNBChannelName());
        //rbnbChannelMap.PutUserInfo(channelIndex, "units=none");
        // add the barTrendAsString field data
        channelIndex = rbnbChannelMap.Add("barTrendAsString"); // Falling Slowly
        rbnbChannelMap.PutUserInfo(channelIndex, "units=none");
        // add the barometer field data
        channelIndex = rbnbChannelMap.Add("barometer"); // 29.9
        rbnbChannelMap.PutUserInfo(channelIndex, "units=inch Hg");
        // add the insideTemperature field data
        channelIndex = rbnbChannelMap.Add("insideTemperature"); // 83.9
        rbnbChannelMap.PutUserInfo(channelIndex, "units=degrees F");
        // add the insideHumidity field data
        channelIndex = rbnbChannelMap.Add("insideHumidity"); // 51
        rbnbChannelMap.PutUserInfo(channelIndex, "units=percent");
        // add the outsideTemperature field data
        channelIndex = rbnbChannelMap.Add("outsideTemperature"); // 76.7
        rbnbChannelMap.PutUserInfo(channelIndex, "units=degrees F");
        // add the windSpeed field data
        channelIndex = rbnbChannelMap.Add("windSpeed"); // 5
        rbnbChannelMap.PutUserInfo(channelIndex, "units=mph");
        // add the tenMinuteAverageWindSpeed field data
        channelIndex = rbnbChannelMap.Add("tenMinuteAverageWindSpeed"); // 4
        rbnbChannelMap.PutUserInfo(channelIndex, "units=mph");
        // add the windDirection field data
        channelIndex = rbnbChannelMap.Add("windDirection"); // 80
        rbnbChannelMap.PutUserInfo(channelIndex, "units=degrees");
        // add the outsideHumidity field data
        channelIndex = rbnbChannelMap.Add("outsideHumidity"); // 73
        rbnbChannelMap.PutUserInfo(channelIndex, "units=percent");
        // add the rainRate field data
        channelIndex = rbnbChannelMap.Add("rainRate"); // 0.0
        rbnbChannelMap.PutUserInfo(channelIndex, "units=inch/hour");
        // add the uvRadiation field data
        channelIndex = rbnbChannelMap.Add("uvRadiation"); // 0
        rbnbChannelMap.PutUserInfo(channelIndex, "UV index");
        // add the solarRadiation field data
        channelIndex = rbnbChannelMap.Add("solarRadiation"); // 0.0
        rbnbChannelMap.PutUserInfo(channelIndex, "watt/m^2");
        // add the stormRain field data
        channelIndex = rbnbChannelMap.Add("stormRain"); // 0.0
        rbnbChannelMap.PutUserInfo(channelIndex, "inch");
        // add the currentStormStartDate field data
        channelIndex = rbnbChannelMap.Add("currentStormStartDate"); // -1--1-1999
        rbnbChannelMap.PutUserInfo(channelIndex, "units=none");
        // add the dailyRain field data
        channelIndex = rbnbChannelMap.Add("dailyRain"); // 0.0
        rbnbChannelMap.PutUserInfo(channelIndex, "units=inch");
        // add the monthlyRain field data
        channelIndex = rbnbChannelMap.Add("monthlyRain"); // 0.0
        rbnbChannelMap.PutUserInfo(channelIndex, "units=inch");
        // add the yearlyRain field data
        channelIndex = rbnbChannelMap.Add("yearlyRain"); // 15.0
        rbnbChannelMap.PutUserInfo(channelIndex, "units=inch");
        // add the dailyEvapoTranspiration field data
        channelIndex = rbnbChannelMap.Add("dailyEvapoTranspiration"); // 0.0
        rbnbChannelMap.PutUserInfo(channelIndex, "units=inch");
        // add the monthlyEvapoTranspiration field data
        channelIndex = rbnbChannelMap.Add("monthlyEvapoTranspiration"); // 0.0
        rbnbChannelMap.PutUserInfo(channelIndex, "units=inch");
        // add the yearlyEvapoTranspiration field data
        channelIndex = rbnbChannelMap.Add("yearlyEvapoTranspiration"); // 93.0
        rbnbChannelMap.PutUserInfo(channelIndex, "units=inch");
        // add the transmitterBatteryStatus field data
        // NOTE(review): this channel is registered but never populated in the
        // sample-flush code below (only logged) — confirm whether intentional.
        channelIndex = rbnbChannelMap.Add("transmitterBatteryStatus"); // 0
        rbnbChannelMap.PutUserInfo(channelIndex, "units=none");
        // add the consoleBatteryVoltage field data
        channelIndex = rbnbChannelMap.Add("consoleBatteryVoltage"); // 4.681640625
        rbnbChannelMap.PutUserInfo(channelIndex, "units=volts");
        // add the forecastAsString field data
        channelIndex = rbnbChannelMap.Add("forecastAsString"); // Partially Cloudy
        rbnbChannelMap.PutUserInfo(channelIndex, "units=none");
        // add the forecastRuleNumberAsString field data
        //channelIndex = rbnbChannelMap.Add("forecastRuleNumberAsString");
        // Increasing clouds with little temperature change.
        //rbnbChannelMap.PutUserInfo(channelIndex, "units=none");
        // add the timeOfSunrise field data
        channelIndex = rbnbChannelMap.Add("timeOfSunrise"); // 05:49
        rbnbChannelMap.PutUserInfo(channelIndex, "units=none");
        // add the timeOfSunset field data
        channelIndex = rbnbChannelMap.Add("timeOfSunset"); // 19:11
        rbnbChannelMap.PutUserInfo(channelIndex, "units=none");
        channelIndex = rbnbChannelMap.Add("DecimalASCIISampleData"); // sample data as ASCII
        rbnbChannelMap.PutUserInfo(channelIndex, "units=none");
        // register the channel map of variables and units with the DataTurbine
        getSource().Register(rbnbChannelMap);
        // reset variables for use with the incoming data
        rbnbChannelMap.Clear();
        channelIndex = 0;
        // wake the instrument with an initial '\n' command
        this.command = this.commandSuffix;
        this.sentCommand = queryInstrument(this.command);
        // allow time for the instrument response
        streamingThread.sleep(2000);
        // then request a LOOP sample
        this.command = this.commandPrefix + this.takeSampleCommand + this.commandSuffix;
        this.sentCommand = queryInstrument(command);
        // while there are bytes to read from the socket ...
        while (this.socketChannel.read(buffer) != -1 || buffer.position() > 0) {
            // prepare the buffer for reading
            buffer.flip();
            // while there are unread bytes in the ByteBuffer
            while (buffer.hasRemaining()) {
                byteOne = buffer.get();
                // (verbose per-byte trace of the FIFO window, sample buffer and
                // state machine, kept here commented out for troubleshooting)
                //logger.debug("b1: " + new String(Hex.encodeHex((new byte[]{byteOne}))) + "\t" +
                //             "b2: " + new String(Hex.encodeHex((new byte[]{byteTwo}))) + "\t" +
                //             "b3: " + new String(Hex.encodeHex((new byte[]{byteThree}))) + "\t" +
                //             "b4: " + new String(Hex.encodeHex((new byte[]{byteFour}))) + "\t" +
                //             "sample pos: " + sampleBuffer.position() + "\t" +
                //             "sample rem: " + sampleBuffer.remaining() + "\t" +
                //             "sample cnt: " + sampleByteCount + "\t" +
                //             "buffer pos: " + buffer.position() + "\t" +
                //             "buffer rem: " + buffer.remaining() + "\t" +
                //             "state: " + state
                //);
                // Use a State Machine to process the byte stream.
                // Start building an rbnb frame for the entire sample, first by
                // inserting a timestamp into the channelMap. This time is merely
                // the time of insert into the data turbine, not the time of
                // observations of the measurements. That time should be parsed out
                // of the sample in the Sink client code
                switch (state) {
                case 0:
                    // sample line is begun by "ACK L" (the first part of ACK + "LOOP")
                    // note bytes are in reverse order in the FIFO window
                    if (byteOne == 0x4C && byteTwo == 0x06) {
                        sampleByteCount++; // add the last byte found to the count
                        // add the last byte found to the sample buffer
                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);
                        } else {
                            sampleBuffer.compact();
                            sampleBuffer.put(byteOne);
                        }
                        // we've found the beginning of a sample, move on
                        state = 1;
                        break;
                    } else {
                        break;
                    }
                case 1:
                    // read the rest of the bytes to the next EOL characters
                    // sample line is terminated by "\n\r"
                    // note bytes are in reverse order in the FIFO window
                    // NOTE(review): state 2 is unused; the machine jumps from 1 to 3.
                    if (byteOne == 0x0D && byteTwo == 0x0A) {
                        sampleByteCount++; // add the last byte found to the count
                        // add the last byte found to the sample buffer
                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);
                        } else {
                            sampleBuffer.compact();
                            sampleBuffer.put(byteOne);
                        }
                        state = 3;
                        break;
                    } else { // not 0x0A0D
                        // still in the middle of the sample, keep adding bytes
                        sampleByteCount++; // add each byte found
                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);
                        } else {
                            sampleBuffer.compact();
                            logger.debug("Compacting sampleBuffer ...");
                            sampleBuffer.put(byteOne);
                        }
                        break;
                    } // end if for 0x0A0D EOL
                case 3:
                    // At this point, we've found the \n\r delimiter, read the first
                    // of 2 CRC bytes
                    sampleByteCount++; // add the last byte found to the count
                    // add the last byte found to the sample buffer
                    if (sampleBuffer.remaining() > 0) {
                        sampleBuffer.put(byteOne);
                    } else {
                        sampleBuffer.compact();
                        sampleBuffer.put(byteOne);
                    }
                    state = 4;
                    break;
                case 4:
                    // At this point, we've found the \n\r delimiter, read the second
                    // of 2 CRC bytes
                    sampleByteCount++; // add the last byte found to the count
                    // add the last byte found to the sample buffer
                    if (sampleBuffer.remaining() > 0) {
                        sampleBuffer.put(byteOne);
                    } else {
                        sampleBuffer.compact();
                        sampleBuffer.put(byteOne);
                    }
                    state = 0;
                    // extract just the length of the sample bytes out of the
                    // sample buffer, and place it in the channel map as a
                    // byte array. Then, send it to the data turbine.
                    byte[] sampleArray = new byte[sampleByteCount];
                    try {
                        sampleBuffer.flip();
                        sampleBuffer.get(sampleArray);
                        // parse and send the sample to the data turbine
                        this.davisWxParser = new DavisWxParser(sampleBuffer);
                    } catch (java.lang.Exception e) {
                        // on any parse failure, drop this sample and reset all
                        // per-sample state so scanning can restart cleanly
                        logger.info(
                                "There was a problem parsing the binary weather LOOP packet. Skipping this sample.");
                        byteOne = 0x00;
                        byteTwo = 0x00;
                        byteThree = 0x00;
                        byteFour = 0x00;
                        sampleBuffer.clear();
                        sampleByteCount = 0;
                        rbnbChannelMap.Clear();
                        break;
                    }
                    // create a character string to store characters from the TCP stream
                    StringBuilder decimalASCIISampleData = new StringBuilder();
                    rbnbChannelMap.PutTimeAuto("server");
                    // add the raw binary LOOP packet data
                    //channelIndex = rbnbChannelMap.Add(getRBNBChannelName());
                    //rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    //rbnbChannelMap.PutDataAsByteArray(channelIndex, sampleArray); // raw binary LOOP packet
                    // add the barTrendAsString field data
                    channelIndex = rbnbChannelMap.Add("barTrendAsString"); // Falling Slowly
                    rbnbChannelMap.PutMime(channelIndex, "text/plain");
                    rbnbChannelMap.PutDataAsString(channelIndex, davisWxParser.getBarTrendAsString());
                    decimalASCIISampleData.append(
                            String.format("\"%16s\"", (Object) davisWxParser.getBarTrendAsString()) + ", ");
                    // add the packetType field to the ASCII string only
                    decimalASCIISampleData.append(
                            String.format("%1d", (Object) new Integer(davisWxParser.getPacketType())) + ", ");
                    // add the nextRecord field to the ASCII string only
                    decimalASCIISampleData.append(
                            String.format("%04d", (Object) new Integer(davisWxParser.getNextRecord())) + ", ");
                    // add the barometer field data
                    channelIndex = rbnbChannelMap.Add("barometer"); // 29.9
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat32(channelIndex, new float[] { davisWxParser.getBarometer() });
                    decimalASCIISampleData.append(
                            String.format("%06.4f", (Object) new Float(davisWxParser.getBarometer())) + ", ");
                    // add the insideTemperature field data
                    channelIndex = rbnbChannelMap.Add("insideTemperature"); // 83.9
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat32(channelIndex, new float[] { davisWxParser.getInsideTemperature() });
                    decimalASCIISampleData.append(
                            String.format("%05.2f", (Object) new Float(davisWxParser.getInsideTemperature())) + ", ");
                    // add the insideHumidity field data
                    channelIndex = rbnbChannelMap.Add("insideHumidity"); // 51
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsInt32(channelIndex, new int[] { davisWxParser.getInsideHumidity() });
                    decimalASCIISampleData.append(
                            String.format("%03d", (Object) new Integer(davisWxParser.getInsideHumidity())) + ", ");
                    // add the outsideTemperature field data
                    channelIndex = rbnbChannelMap.Add("outsideTemperature"); // 76.7
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat32(channelIndex, new float[] { davisWxParser.getOutsideTemperature() });
                    decimalASCIISampleData.append(
                            String.format("%05.2f", (Object) new Float(davisWxParser.getOutsideTemperature())) + ", ");
                    // add the windSpeed field data
                    channelIndex = rbnbChannelMap.Add("windSpeed"); // 5
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsInt32(channelIndex, new int[] { davisWxParser.getWindSpeed() });
                    decimalASCIISampleData.append(
                            String.format("%03d", (Object) new Integer(davisWxParser.getWindSpeed())) + ", ");
                    // add the tenMinuteAverageWindSpeed field data
                    channelIndex = rbnbChannelMap.Add("tenMinuteAverageWindSpeed"); // 4
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsInt32(channelIndex,
                            new int[] { davisWxParser.getTenMinuteAverageWindSpeed() });
                    decimalASCIISampleData.append(String.format("%03d",
                            (Object) new Integer(davisWxParser.getTenMinuteAverageWindSpeed())) + ", ");
                    // add the windDirection field data
                    channelIndex = rbnbChannelMap.Add("windDirection"); // 80
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsInt32(channelIndex, new int[] { davisWxParser.getWindDirection() });
                    decimalASCIISampleData.append(
                            String.format("%03d", (Object) new Integer(davisWxParser.getWindDirection())) + ", ");
                    // add the extraTemperature fields as ASCII only
                    float[] extraTemperatures = davisWxParser.getExtraTemperatures();
                    for (float temperature : extraTemperatures) {
                        decimalASCIISampleData
                                .append(String.format("%05.2f", (Object) new Float(temperature)) + ", ");
                    }
                    // add the soilTemperature fields as ASCII only
                    float[] soilTemperatures = davisWxParser.getSoilTemperatures();
                    for (float soil : soilTemperatures) {
                        decimalASCIISampleData.append(String.format("%05.2f", (Object) new Float(soil)) + ", ");
                    }
                    // add the leafTemperature fields as ASCII only
                    float[] leafTemperatures = davisWxParser.getLeafTemperatures();
                    for (float leaf : leafTemperatures) {
                        decimalASCIISampleData.append(String.format("%05.2f", (Object) new Float(leaf)) + ", ");
                    }
                    // add the outsideHumidity field data
                    channelIndex = rbnbChannelMap.Add("outsideHumidity"); // 73
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsInt32(channelIndex, new int[] { davisWxParser.getOutsideHumidity() });
                    decimalASCIISampleData.append(
                            String.format("%03d", (Object) new Integer(davisWxParser.getOutsideHumidity())) + ", ");
                    // add the rainRate field data
                    channelIndex = rbnbChannelMap.Add("rainRate"); // 0.0
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat32(channelIndex, new float[] { davisWxParser.getRainRate() });
                    decimalASCIISampleData.append(
                            String.format("%04.2f", (Object) new Float(davisWxParser.getRainRate())) + ", ");
                    // add the uvRadiation field data
                    channelIndex = rbnbChannelMap.Add("uvRadiation"); // 0
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsInt32(channelIndex, new int[] { davisWxParser.getUvRadiation() });
                    decimalASCIISampleData.append(
                            String.format("%03d", (Object) new Integer(davisWxParser.getUvRadiation())) + ", ");
                    // add the solarRadiation field data
                    channelIndex = rbnbChannelMap.Add("solarRadiation"); // 0.0
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat32(channelIndex, new float[] { davisWxParser.getSolarRadiation() });
                    decimalASCIISampleData.append(
                            String.format("%04.1f", (Object) new Float(davisWxParser.getSolarRadiation())) + ", ");
                    // add the stormRain field data
                    channelIndex = rbnbChannelMap.Add("stormRain"); // 0.0
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat32(channelIndex, new float[] { davisWxParser.getStormRain() });
                    decimalASCIISampleData.append(
                            String.format("%04.2f", (Object) new Float(davisWxParser.getStormRain())) + ", ");
                    // add the currentStormStartDate field data
                    channelIndex = rbnbChannelMap.Add("currentStormStartDate"); // -1--1-1999
                    rbnbChannelMap.PutMime(channelIndex, "text/plain");
                    rbnbChannelMap.PutDataAsString(channelIndex, davisWxParser.getCurrentStormStartDate());
                    decimalASCIISampleData.append(
                            String.format("%10s", (Object) davisWxParser.getCurrentStormStartDate()) + ", ");
                    // add the dailyRain field data
                    channelIndex = rbnbChannelMap.Add("dailyRain"); // 0.0
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat32(channelIndex, new float[] { davisWxParser.getDailyRain() });
                    decimalASCIISampleData.append(
                            String.format("%04.2f", (Object) new Float(davisWxParser.getDailyRain())) + ", ");
                    // add the monthlyRain field data
                    channelIndex = rbnbChannelMap.Add("monthlyRain"); // 0.0
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat32(channelIndex, new float[] { davisWxParser.getMonthlyRain() });
                    decimalASCIISampleData.append(
                            String.format("%04.2f", (Object) new Float(davisWxParser.getMonthlyRain())) + ", ");
                    // add the yearlyRain field data
                    channelIndex = rbnbChannelMap.Add("yearlyRain"); // 15.0
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat32(channelIndex, new float[] { davisWxParser.getYearlyRain() });
                    decimalASCIISampleData.append(
                            String.format("%04.2f", (Object) new Float(davisWxParser.getYearlyRain())) + ", ");
                    // add the dailyEvapoTranspiration field data
                    channelIndex = rbnbChannelMap.Add("dailyEvapoTranspiration"); // 0.0
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat32(channelIndex,
                            new float[] { davisWxParser.getDailyEvapoTranspiration() });
                    decimalASCIISampleData.append(String.format("%04.2f",
                            (Object) new Float(davisWxParser.getDailyEvapoTranspiration())) + ", ");
                    // add the monthlyEvapoTranspiration field data
                    channelIndex = rbnbChannelMap.Add("monthlyEvapoTranspiration"); // 0.0
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat32(channelIndex,
                            new float[] { davisWxParser.getMonthlyEvapoTranspiration() });
                    decimalASCIISampleData.append(String.format("%04.2f",
                            (Object) new Float(davisWxParser.getMonthlyEvapoTranspiration())) + ", ");
                    // add the yearlyEvapoTranspiration field data
                    channelIndex = rbnbChannelMap.Add("yearlyEvapoTranspiration"); // 93.0
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat32(channelIndex,
                            new float[] { davisWxParser.getYearlyEvapoTranspiration() });
                    decimalASCIISampleData.append(String.format("%04.2f",
                            (Object) new Float(davisWxParser.getYearlyEvapoTranspiration())) + ", ");
                    // add the consoleBatteryVoltage field data
                    channelIndex = rbnbChannelMap.Add("consoleBatteryVoltage"); // 4.681640625
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat32(channelIndex,
                            new float[] { davisWxParser.getConsoleBatteryVoltage() });
                    decimalASCIISampleData.append(String.format("%04.2f",
                            (Object) new Float(davisWxParser.getConsoleBatteryVoltage())) + ", ");
                    // add the forecastAsString field data
                    channelIndex = rbnbChannelMap.Add("forecastAsString"); // Partially Cloudy
                    rbnbChannelMap.PutMime(channelIndex, "text/plain");
                    rbnbChannelMap.PutDataAsString(channelIndex, davisWxParser.getForecastAsString());
                    decimalASCIISampleData.append(
                            String.format("\"%47s\"", (Object) davisWxParser.getForecastAsString()) + ", ");
                    // add the forecastRuleNumberAsString field data as ASCII only
                    decimalASCIISampleData.append(
                            String.format("\"%167s\"", (Object) davisWxParser.getForecastRuleNumberAsString()) + ", ");
                    // add the timeOfSunrise field data
                    channelIndex = rbnbChannelMap.Add("timeOfSunrise"); // 05:49
                    rbnbChannelMap.PutMime(channelIndex, "text/plain");
                    rbnbChannelMap.PutDataAsString(channelIndex, davisWxParser.getTimeOfSunrise());
                    decimalASCIISampleData
                            .append(String.format("%5s", (Object) davisWxParser.getTimeOfSunrise()) + ", ");
                    // add the timeOfSunset field data
                    channelIndex = rbnbChannelMap.Add("timeOfSunset"); // 19:11
                    rbnbChannelMap.PutMime(channelIndex, "text/plain");
                    rbnbChannelMap.PutDataAsString(channelIndex, davisWxParser.getTimeOfSunset());
                    decimalASCIISampleData
                            .append(String.format("%5s", (Object) davisWxParser.getTimeOfSunset()) + ", ");
                    // then add a timestamp to the end of the sample
                    DATE_FORMAT.setTimeZone(TZ);
                    String sampleDateAsString = DATE_FORMAT.format(new Date()).toString();
                    decimalASCIISampleData.append(sampleDateAsString);
                    decimalASCIISampleData.append("\n");
                    // add the ASCII CSV string of selected fields as a channel
                    channelIndex = rbnbChannelMap.Add(getRBNBChannelName());
                    rbnbChannelMap.PutMime(channelIndex, "text/plain");
                    rbnbChannelMap.PutDataAsString(channelIndex, decimalASCIISampleData.toString());
                    // finally, send the channel map of data to the DataTurbine
                    getSource().Flush(rbnbChannelMap);
                    String sampleString = new String(Hex.encodeHex(sampleArray));
                    logger.info("Sample: " + sampleString);
                    logger.debug("barTrendAsString: " + davisWxParser.getBarTrendAsString());
                    logger.debug("barometer: " + davisWxParser.getBarometer());
                    logger.debug("insideTemperature: " + davisWxParser.getInsideTemperature());
                    logger.debug("insideHumidity: " + davisWxParser.getInsideHumidity());
                    logger.debug("outsideTemperature: " + davisWxParser.getOutsideTemperature());
                    logger.debug("windSpeed: " + davisWxParser.getWindSpeed());
                    logger.debug(
                            "tenMinuteAverageWindSpeed: " + davisWxParser.getTenMinuteAverageWindSpeed());
                    logger.debug("windDirection: " + davisWxParser.getWindDirection());
                    logger.debug("outsideHumidity: " + davisWxParser.getOutsideHumidity());
                    logger.debug("rainRate: " + davisWxParser.getRainRate());
                    logger.debug("uvRadiation: " + davisWxParser.getUvRadiation());
                    logger.debug("solarRadiation: " + davisWxParser.getSolarRadiation());
                    logger.debug("stormRain: " + davisWxParser.getStormRain());
                    logger.debug("currentStormStartDate: " + davisWxParser.getCurrentStormStartDate());
                    logger.debug("dailyRain: " + davisWxParser.getDailyRain());
                    logger.debug("monthlyRain: " + davisWxParser.getMonthlyRain());
                    logger.debug("yearlyRain: " + davisWxParser.getYearlyRain());
                    logger.debug(
                            "dailyEvapoTranspiration: " + davisWxParser.getDailyEvapoTranspiration());
                    logger.debug(
                            "monthlyEvapoTranspiration: " + davisWxParser.getMonthlyEvapoTranspiration());
                    logger.debug(
                            "yearlyEvapoTranspiration: " + davisWxParser.getYearlyEvapoTranspiration());
                    logger.debug("transmitterBatteryStatus: "
                            + Arrays.toString(davisWxParser.getTransmitterBatteryStatus()));
                    logger.debug("consoleBatteryVoltage: " + davisWxParser.getConsoleBatteryVoltage());
                    logger.debug("forecastAsString: " + davisWxParser.getForecastAsString());
                    //logger.debug("forecastRuleNumberAsString: " + davisWxParser.getForecastRuleNumberAsString());
                    logger.debug("timeOfSunrise: " + davisWxParser.getTimeOfSunrise());
                    logger.debug("timeOfSunset: " + davisWxParser.getTimeOfSunset());
                    logger.info(" flushed data to the DataTurbine. ");
                    // reset the FIFO window and per-sample state for the next sample
                    byteOne = 0x00;
                    byteTwo = 0x00;
                    byteThree = 0x00;
                    byteFour = 0x00;
                    sampleBuffer.clear();
                    sampleByteCount = 0;
                    rbnbChannelMap.Clear();
                    //logger.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. Cleared rbnbChannelMap.");
                    //state = 0;
                    // Once the sample is flushed, take a new sample
                    // allow time for the instrument response
                    streamingThread.sleep(2000);
                    this.command = this.commandPrefix + this.takeSampleCommand + this.commandSuffix;
                    this.sentCommand = queryInstrument(command);
                } // end switch statement
                // shift the bytes in the FIFO window
                byteFour = byteThree;
                byteThree = byteTwo;
                byteTwo = byteOne;
            } //end while (more unread bytes)
            // prepare the buffer to read in more bytes from the stream
            buffer.compact();
        } // end while (more socket bytes to read)
        this.socketChannel.close();
    } catch (IOException e) {
        // handle exceptions
        // In the event of an i/o exception, log the exception, and allow execute()
        // to return false, which will prompt a retry.
        failed = true;
        e.printStackTrace();
        return !failed;
    } catch (SAPIException sapie) {
        // In the event of an RBNB communication exception, log the exception,
        // and allow execute() to return false, which will prompt a retry.
        failed = true;
        sapie.printStackTrace();
        return !failed;
    } catch (java.lang.InterruptedException ine) {
        failed = true;
        ine.printStackTrace();
        return !failed;
    }
    return !failed;
}
From source file:org.wso2.msf4j.internal.router.TestMicroservice.java
@Path("/stream/upload/fail")
@PUT
public HttpStreamHandler streamUploadFailure() {
    // Size of the off-heap buffer that will hold the whole upload (30 MiB).
    final int fileSize = 30 * 1024 * 1024;
    return new HttpStreamHandler() {
        private org.wso2.msf4j.Response response;
        // Direct (off-heap) buffer that accumulates the uploaded chunks.
        ByteBuffer offHeapBuffer = ByteBuffer.allocateDirect(fileSize);

        @Override
        public void init(org.wso2.msf4j.Response response) {
            this.response = response;
        }

        @Override
        public void chunk(ByteBuffer content) throws Exception {
            // NOTE(review): content.array() throws UnsupportedOperationException
            // when the incoming buffer is direct/array-less, and otherwise copies
            // the whole backing array regardless of position/limit. Given the
            // endpoint name ("/stream/upload/fail") this appears to be the
            // intended failure trigger for the test — confirm before "fixing"
            // (the behavior-preserving alternative would be offHeapBuffer.put(content)).
            offHeapBuffer.put(content.array());
        }

        @Override
        public void end() throws Exception {
            // position() equals the number of bytes accumulated so far
            int bytesUploaded = offHeapBuffer.position();
            response.setStatus(Response.Status.OK.getStatusCode());
            response.setEntity("Uploaded:" + bytesUploaded);
            response.send();
        }

        @Override
        public void error(Throwable cause) {
            // drop the reference so the off-heap memory can be reclaimed
            offHeapBuffer = null;
        }
    };
}
From source file:com.meltmedia.cadmium.core.FileSystemManager.java
public static void streamCopy(InputStream streamIn, OutputStream streamOut, boolean leaveOutputOpen) throws IOException { ReadableByteChannel input = Channels.newChannel(streamIn); WritableByteChannel output = Channels.newChannel(streamOut); ByteBuffer buffer = ByteBuffer.allocateDirect(16 * 1024); while (input.read(buffer) != -1) { buffer.flip();// w w w . ja v a2 s. c o m output.write(buffer); buffer.compact(); } buffer.flip(); // Make sure the buffer is empty while (buffer.hasRemaining()) { output.write(buffer); } input.close(); if (!leaveOutputOpen) { output.close(); } }
From source file:com.projecttango.examples.java.pointtopoint.PointToPointActivity.java
/**
 * Set up the callback listeners for the Tango Service and obtain other parameters required
 * after Tango connection.
 * Listen to updates from the RGB camera and point cloud.
 */
private void startupTango() {
    // No need to add any coordinate frame pairs since we are not
    // using pose data. So just initialize.
    ArrayList<TangoCoordinateFramePair> framePairs = new ArrayList<TangoCoordinateFramePair>();
    mTango.connectListener(framePairs, new OnTangoUpdateListener() {
        @Override
        public void onPoseAvailable(TangoPoseData pose) {
            // We are not using onPoseAvailable for this app.
        }

        @Override
        public void onFrameAvailable(int cameraId) {
            // Check if the frame available is for the camera we want and update its frame
            // on the view.
            if (cameraId == TangoCameraIntrinsics.TANGO_CAMERA_COLOR) {
                // Mark a camera frame as available for rendering in the OpenGL thread.
                mIsFrameAvailableTangoThread.set(true);
                mSurfaceView.requestRender();
            }
        }

        @Override
        public void onXyzIjAvailable(TangoXyzIjData xyzIj) {
            // We are not using onXyzIjAvailable for this app.
        }

        @Override
        public void onPointCloudAvailable(TangoPointCloudData pointCloud) {
            // Save the cloud and point data for later use.
            mPointCloudManager.updatePointCloud(pointCloud);
        }

        @Override
        public void onTangoEvent(TangoEvent event) {
            // We are not using onTangoEvent for this app.
        }
    });
    // Also listen for raw color-camera frames so the latest image can be sampled later.
    mTango.experimentalConnectOnFrameListener(TangoCameraIntrinsics.TANGO_CAMERA_COLOR,
            new Tango.OnFrameAvailableListener() {
                @Override
                public void onFrameAvailable(TangoImageBuffer tangoImageBuffer, int i) {
                    // Keep a deep copy; the callback's buffer is owned by Tango and may be reused.
                    mCurrentImageBuffer = copyImageBuffer(tangoImageBuffer);
                }

                // Deep-copies an image buffer into a freshly allocated direct ByteBuffer.
                TangoImageBuffer copyImageBuffer(TangoImageBuffer imageBuffer) {
                    ByteBuffer clone = ByteBuffer.allocateDirect(imageBuffer.data.capacity());
                    imageBuffer.data.rewind();
                    clone.put(imageBuffer.data);
                    // rewind the source so its position is unchanged for other consumers
                    imageBuffer.data.rewind();
                    clone.flip();
                    return new TangoImageBuffer(imageBuffer.width, imageBuffer.height, imageBuffer.stride,
                            imageBuffer.frameNumber, imageBuffer.timestamp, imageBuffer.format, clone);
                }
            });
}
From source file:org.ros.android.rviz_for_android.drawable.loader.ColladaLoader.java
/**
 * Collects all acceptable texture types (diffuse, bump, etc) referenced by the COLLADA
 * document and returns them as compressed ETC1 textures keyed by texture-type name.
 *
 * For each texture: resolves the image filename via the COLLADA image/effect libraries,
 * then either loads a previously cached compressed copy from local storage, or downloads
 * the image, vertically flips it, ETC1-compresses it, and caches the result.
 *
 * NOTE(review): the parameter 'prefix' is never read; the body uses 'imgPrefix',
 * presumably an instance field set elsewhere — confirm whether 'prefix' should be used.
 *
 * @param prefix see note above — currently unused by this body
 * @return map from texture-type name to its compressed ETC1 texture (may be empty)
 */
private Map<String, ETC1Texture> getTextures(String prefix) {
    Map<String, ETC1Texture> retval = new HashMap<String, ETC1Texture>();
    String filesDirectory = serverConnection.getContext().getFilesDir().toString() + "/";
    // Find which types of acceptable textures are present (diffuse, bump, etc)
    for (textureType t : textureType.values()) {
        if (attributeExists("/COLLADA/library_effects/", t.toString(), "texture/@texture")) {
            // attributeExists stores its match in super.existResult as a side effect
            String texPointer = super.existResult;
            String filename = null;
            // If the image library has an image with texPointer's ID, use that
            // otherwise, follow the pointer trail
            if (attributeExists("/COLLADA/library_images/image[@id='" + texPointer + "']/init_from")) {
                filename = super.existResult;
                Log.d("DAE", "Shortcut to texture name: " + filename);
            } else {
                // Locate the image ID from the texture pointer
                String imgID = getSingleAttribute(
                        "/COLLADA/library_effects//newparam[@sid='" + texPointer + "']/sampler2D/source");
                // Locate the image name
                String imgName = getSingleAttribute(
                        "/COLLADA/library_effects//newparam[@sid='" + imgID + "']/surface/init_from");
                // Locate the filename
                filename = getSingleAttribute("/COLLADA/library_images/image[@id='" + imgName + "']/init_from");
            }
            Log.d("DAE", "Filename = " + filename);
            if (filename.length() == 0)
                Log.e("DAE", "Filename = 0 length!");

            // If a cached compressed copy exists, load that.
            // Otherwise, download, compress, and save the image.
            String compressedFilename = "COMPRESSED_" + serverConnection.getSanitizedPrefix(imgPrefix) + filename;
            if (!serverConnection.fileExists(compressedFilename)) {
                Log.i("DAE", "No compressed cached copy exists.");
                // Load the uncompressed image
                String downloadedFilename = serverConnection.getFile(imgPrefix + filename);
                Bitmap uncompressed = null;
                if (downloadedFilename == null) {
                    Log.e("DAE", "Unable to get file " + imgPrefix + filename + " from server!");
                    // Fall back to a 1x1 black placeholder so compression still succeeds
                    uncompressed = Bitmap.createBitmap(new int[] { 0, 0 }, 1, 1, Bitmap.Config.RGB_565);
                } else
                    uncompressed = openTextureFile(filesDirectory, downloadedFilename);

                // Flip the image vertically (OpenGL texture coordinates are bottom-up)
                Matrix flip = new Matrix();
                flip.postScale(1f, -1f);
                Bitmap uncompressed_two = Bitmap.createBitmap(uncompressed, 0, 0, uncompressed.getWidth(),
                        uncompressed.getHeight(), flip, true);
                uncompressed.recycle();

                // Compress the image
                ETC1Texture compressed = compressBitmap(uncompressed_two);

                // Save the compressed texture
                // NOTE(review): MODE_WORLD_READABLE is deprecated/insecure on modern
                // Android — confirm the target API level still permits it.
                try {
                    BufferedOutputStream bout = new BufferedOutputStream(serverConnection.getContext()
                            .openFileOutput(compressedFilename, Context.MODE_WORLD_READABLE));
                    bout.write(compressed.getData().array());
                    bout.close();
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                }

                // Add the compressed texture to the return map
                retval.put(t.toString(), compressed);
            } else {
                Log.i("DAE", "A compressed cached copy exists!");
                // Load the existing compressed texture
                try {
                    byte[] dataArray = IOUtils
                            .toByteArray(serverConnection.getContext().openFileInput(compressedFilename));
                    // Determine the dimensions of the image: ETC1 uses 4 bits/pixel, so
                    // pixel count = 2 * byte count; halve a square 1024x1024 guess until
                    // it fits. Assumes power-of-two square textures — TODO confirm.
                    int bytes = 2 * dataArray.length;
                    int width = 1024;
                    int height = 1024;
                    while ((width * height) > bytes && (width * height) >= 1) {
                        width /= 2;
                        height /= 2;
                    }
                    Log.i("DAE", "Compressed size determined to be " + width + " x " + height);
                    ByteBuffer dataBuffer = ByteBuffer.allocateDirect(dataArray.length)
                            .order(ByteOrder.nativeOrder());
                    dataBuffer.put(dataArray);
                    dataBuffer.position(0);
                    ETC1Texture compressed = new ETC1Texture(width, height, dataBuffer);
                    retval.put(t.toString(), compressed);
                } catch (FileNotFoundException e) {
                    Log.e("DAE", "Compressed texture not found!");
                    e.printStackTrace();
                } catch (IOException e) {
                    Log.e("DAE", "IOException!");
                    e.printStackTrace();
                }
            }
        }
    }
    return retval;
}
From source file:com.ibm.crail.tools.CrailBenchmark.java
void readRandom(String filename, int size, int loop, boolean buffered) throws Exception { System.out.println("readRandom, filename " + filename + ", size " + size + ", loop " + loop + ", buffered " + buffered);//from www . j av a 2 s . c o m CrailBuffer buf = null; if (size == CrailConstants.BUFFER_SIZE) { buf = fs.allocateBuffer(); } else if (size < CrailConstants.BUFFER_SIZE) { CrailBuffer _buf = fs.allocateBuffer(); _buf.clear().limit(size); buf = _buf.slice(); } else { buf = OffHeapBuffer.wrap(ByteBuffer.allocateDirect(size)); } //warmup ConcurrentLinkedQueue<CrailBuffer> bufferQueue = new ConcurrentLinkedQueue<CrailBuffer>(); bufferQueue.add(buf); warmUp(filename, warmup, bufferQueue); //benchmark System.out.println("starting benchmark..."); fs.getStatistics().reset(); CrailFile file = fs.lookup(filename).get().asFile(); CrailBufferedInputStream bufferedStream = file.getBufferedInputStream(file.getCapacity()); CrailInputStream directStream = file.getDirectInputStream(file.getCapacity()); double sumbytes = 0; double ops = 0; long _range = file.getCapacity() - ((long) buf.capacity()); double range = (double) _range; Random random = new Random(); long start = System.currentTimeMillis(); while (ops < loop) { if (buffered) { buf.clear(); double _offset = range * random.nextDouble(); long offset = (long) _offset; directStream.seek(offset); double ret = (double) directStream.read(buf).get().getLen(); if (ret > 0) { sumbytes = sumbytes + ret; ops = ops + 1.0; } else { break; } } else { buf.clear(); double _offset = range * random.nextDouble(); long offset = (long) _offset; bufferedStream.seek(offset); double ret = (double) bufferedStream.read(buf.getByteBuffer()); if (ret > 0) { sumbytes = sumbytes + ret; ops = ops + 1.0; } else { break; } } } long end = System.currentTimeMillis(); double executionTime = ((double) (end - start)) / 1000.0; double throughput = 0.0; double latency = 0.0; double sumbits = sumbytes * 8.0; if (executionTime > 0) { throughput = sumbits / 
executionTime / 1000.0 / 1000.0; latency = 1000000.0 * executionTime / ops; } bufferedStream.close(); directStream.close(); System.out.println("execution time " + executionTime); System.out.println("ops " + ops); System.out.println("sumbytes " + sumbytes); System.out.println("throughput " + throughput); System.out.println("latency " + latency); fs.getStatistics().print("close"); }
From source file:org.pentaho.di.trans.steps.csvinput.CsvInput.java
/**
 * Closes the current input file and opens the next one in data.filenames, preparing the
 * NIO channel and read buffer, handling parallel-run byte skipping and optional header
 * row skipping.
 *
 * Side effects: (re)assigns data.fis, data.fc, data.bb, data.binaryFilename, advances
 * data.filenr, resets data.rowNumber, and clears data.bytesToSkipInFirstFile.
 * The opened FileInputStream is intentionally kept open in data.fis — presumably closed
 * later via data.closeFile(); confirm against the step lifecycle.
 *
 * @return true if a file was opened, false when all files have been consumed
 * @throws KettleException on any failure to open or position the file
 */
private boolean openNextFile() throws KettleException {
    try {
        // Close the previous file...
        //
        data.closeFile();

        // All files consumed?
        if (data.filenr >= data.filenames.length) {
            return false;
        }

        // Open the next one...
        //
        FileObject fileObject = KettleVFS.getFileObject(data.filenames[data.filenr], getTransMeta());
        if (!(fileObject instanceof LocalFile)) {
            // We can only use NIO on local files at the moment, so that's what we limit
            // ourselves to.
            //
            throw new KettleException(BaseMessages.getString(PKG, "CsvInput.Log.OnlyLocalFilesAreSupported"));
        }

        // Lazy conversion keeps the filename as raw bytes alongside each row.
        if (meta.isLazyConversionActive()) {
            data.binaryFilename = data.filenames[data.filenr].getBytes();
        }

        data.fis = new FileInputStream(KettleVFS.getFilename(fileObject));
        data.fc = data.fis.getChannel();
        // Direct buffer sized by the user-configured preferred buffer size.
        data.bb = ByteBuffer.allocateDirect(data.preferredBufferSize);

        // If we are running in parallel and we need to skip bytes in the first file,
        // let's do so here.
        //
        if (data.parallel) {
            if (data.bytesToSkipInFirstFile > 0) {
                data.fc.position(data.bytesToSkipInFirstFile);

                // Now, we need to skip the first row, until the first CR that is.
                // (we may have landed mid-row after the byte seek)
                readOneRow(true, true);
            }
        }

        // Add filename to result filenames ?
        if (meta.isAddResultFile()) {
            ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, fileObject,
                    getTransMeta().getName(), toString());
            resultFile.setComment("File was read by a Csv input step");
            addResultFile(resultFile);
        }

        // Move to the next filename
        //
        data.filenr++;

        // See if we need to skip a row...
        // - If you have a header row checked and if you're not running in parallel
        // - If you're running in parallel, if a header row is checked, if you're at the
        //   beginning of a file
        //
        if (meta.isHeaderPresent()) {
            // Standard flat file : skip header
            if (!data.parallel || data.bytesToSkipInFirstFile <= 0) {
                readOneRow(true, false); // skip this row.
                // filenr was already incremented above, hence filenr - 1 here.
                logBasic(BaseMessages.getString(PKG, "CsvInput.Log.HeaderRowSkipped",
                        data.filenames[data.filenr - 1]));
            }
        }

        // Reset the row number pointer...
        //
        data.rowNumber = 1L;

        // Don't skip again in the next file...
        //
        data.bytesToSkipInFirstFile = -1L;
        return true;
    } catch (KettleException e) {
        // Re-throw as-is so KettleExceptions are not double-wrapped below.
        throw e;
    } catch (Exception e) {
        throw new KettleException(e);
    }
}
From source file:org.apache.sysml.runtime.matrix.data.LibMatrixNative.java
private static FloatBuffer toFloatBuffer(double[] input, ThreadLocal<FloatBuffer> buff, boolean copy) { //maintain thread-local buffer (resized on demand) FloatBuffer ret = buff.get(); if (ret == null || ret.capacity() < input.length) { ret = ByteBuffer.allocateDirect(4 * input.length).order(ByteOrder.nativeOrder()).asFloatBuffer(); buff.set(ret);// w w w .j av a 2 s .com } //copy to direct byte buffer final FloatBuffer ret2 = ret; if (copy) { IntStream.range(0, input.length).parallel().forEach(i -> ret2.put(i, (float) input[i])); } return ret2; }