List of usage examples for java.nio ByteBuffer allocateDirect
public static ByteBuffer allocateDirect(int capacity)
From source file:thproject.test.com.myapplication.SongRecognitionActivity.java
public void startStreaming() throws GnException { isListening = true;// w w w .ja v a2s .c om gnMicrophone = new GnMic(44100, 16, 1); //mic initialization gnMicrophone.sourceInit(); //initialize music stream gnMusicIdStream = new GnMusicIdStream(gnUser, new GnMusicIdStreamEvents()); gnMusicIdStream.audioProcessStart(gnMicrophone.samplesPerSecond(), gnMicrophone.sampleSizeInBits(), gnMicrophone.numberOfChannels()); Thread audioProcessThread = new Thread(new Runnable() { @Override public void run() { ByteBuffer byteBuffer = ByteBuffer.allocateDirect(1024 * 4); long bytesRead = 0; // Log.d("startRecording","begin loop"); while (isListening) { bytesRead = gnMicrophone.getData(byteBuffer, byteBuffer.capacity()); try { gnMusicIdStream.audioProcess(byteBuffer.array(), bytesRead); } catch (GnException e) { e.printStackTrace(); } } } }); audioProcessThread.start(); gnMusicIdStream.identifyAlbumAsync(); }
From source file:org.restcomm.connect.http.filters.FileCacheServlet.java
/**
 * Stream the given input to the given output via NIO {@link Channels} and a
 * directly allocated NIO {@link ByteBuffer}. Both the input and output
 * streams will implicitly be closed after streaming, regardless of whether
 * an exception is been thrown or not.
 *
 * @param input The input stream.
 * @param output The output stream.
 * @param bufferSize Size in bytes of the transfer buffer (must be &gt; 0).
 * @return The length of the written bytes.
 * @throws IOException When an I/O error occurs.
 */
public static long stream(InputStream input, OutputStream output, int bufferSize) throws IOException {
    // try-with-resources closes both channels (and thus both streams) on all paths.
    try (ReadableByteChannel inputChannel = Channels.newChannel(input);
            WritableByteChannel outputChannel = Channels.newChannel(output)) {
        ByteBuffer buffer = ByteBuffer.allocateDirect(bufferSize);
        long size = 0;
        while (inputChannel.read(buffer) != -1) {
            buffer.flip();
            // ROBUSTNESS FIX: WritableByteChannel.write() is allowed to write fewer
            // bytes than requested (e.g. for a non-blocking channel). The original
            // issued a single write() and then clear()ed the buffer, silently
            // dropping any unwritten remainder; drain the buffer fully instead.
            while (buffer.hasRemaining()) {
                size += outputChannel.write(buffer);
            }
            buffer.clear();
        }
        return size;
    }
}
From source file:org.activiti.engine.test.api.repository.diagram.ProcessDiagramRetrievalTest.java
/**
 * Compares two input streams byte-for-byte. Both streams are closed before
 * returning, matching the original contract.
 *
 * BUGFIX: the original read each stream through a channel and, when either
 * read() returned -1, answered "bytesReadFromStream1 == bytesReadFromStream2".
 * Channel reads may legitimately return different (short) byte counts for
 * identical streams, so equal streams could be reported unequal at EOF.
 * A buffered byte-wise comparison has no such dependence on read sizes.
 *
 * @param stream1 first stream (closed on return).
 * @param stream2 second stream (closed on return).
 * @return true if both streams yield exactly the same byte sequence.
 * @throws IOException if reading either stream fails.
 */
private static boolean isEqual(InputStream stream1, InputStream stream2) throws IOException {
    // try-with-resources replaces the original finally-block close of both streams.
    try (InputStream in1 = new BufferedInputStream(stream1);
            InputStream in2 = new BufferedInputStream(stream2)) {
        int byte1;
        int byte2;
        do {
            byte1 = in1.read();
            byte2 = in2.read();
            if (byte1 != byte2) {
                // Differing byte, or one stream ended before the other.
                return false;
            }
        } while (byte1 != -1); // both hit EOF together => equal
        return true;
    }
}
From source file:org.jtrfp.trcl.obj.WorldObject.java
/**
 * Builds the per-object GPU definition address tables from the model's opaque
 * and transparent triangle lists. The primitive processing runs on the GPU
 * memory-access executor, then the collected indices are packed into direct
 * int-backed byte buffers and mirrored into VEC4Address arrays.
 *
 * @throws NullPointerException if no model has been set on this object.
 */
public final void initializeObjectDefinitions() {
    if (model == null)
        throw new NullPointerException("Model is null. Did you forget to set it?");
    // Collected triangle-definition indices, filled by processPrimitiveList below.
    final ArrayList<Integer> opaqueIndicesList = new ArrayList<Integer>();
    final ArrayList<Integer> transparentIndicesList = new ArrayList<Integer>();
    tr.getThreadManager().submitToThreadPool(new Callable<Void>() {
        @Override
        public Void call() throws Exception {
            // Primitive processing must happen on the GPU memory-access executor;
            // .get() blocks this pool thread until it completes.
            tr.getThreadManager().submitToGPUMemAccess(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    processPrimitiveList(model.getTriangleList(), triangleObjectDefinitions,
                            opaqueIndicesList);
                    processPrimitiveList(model.getTransparentTriangleList(),
                            transparentTriangleObjectDefinitions, transparentIndicesList);
                    return null;
                }
            }).get();//TODO: Make non-blocking
            // Buffers must match the GPU's byte order for the int views below.
            ByteOrder order = getTr().gpu.get().getByteOrder();
            opaqueObjectDefinitionAddressesInVec4 = ByteBuffer.allocateDirect(opaqueIndicesList.size() * 4)
                    .order(order);// 4 bytes per int
            opaqueObjectDefinitionAddressesInVEC4 = new VEC4Address[opaqueIndicesList.size()];
            for (int i = 0; i < opaqueIndicesList.size(); i++)
                opaqueObjectDefinitionAddressesInVEC4[i] = new VEC4Address(opaqueIndicesList.get(i));
            transparentObjectDefinitionAddressesInVec4 = ByteBuffer
                    .allocateDirect(transparentIndicesList.size() * 4).order(order);
            transparentObjectDefinitionAddressesInVEC4 = new VEC4Address[transparentIndicesList.size()];
            for (int i = 0; i < transparentIndicesList.size(); i++)
                transparentObjectDefinitionAddressesInVEC4[i] = new VEC4Address(transparentIndicesList.get(i));
            // Mirror the same indices into the int views of the direct buffers.
            IntBuffer trans = transparentObjectDefinitionAddressesInVec4.asIntBuffer(),
                    opaque = opaqueObjectDefinitionAddressesInVec4.asIntBuffer();
            for (Integer elm : transparentIndicesList)
                trans.put(elm);
            for (Integer elm : opaqueIndicesList)
                opaque.put(elm);
            return null;
        }
    });
}
From source file:edu.hawaii.soest.kilonalu.ctd.SBE37Source.java
/**
 * A method that executes the streaming of data from the source to the RBNB
 * server after all configuration of settings, connections to hosts, and
 * thread initiatizing occurs. This method contains the detailed code for
 * streaming the data and interpreting the stream.
 *
 * Phases: (1) wake the SBE37 and poll until its instrument ID is parsed from
 * the command-prompt response; (2) repeatedly issue take-sample commands and
 * run a two-state FIFO-window state machine over the TCP byte stream,
 * flushing each "S>"-delimited sample to the DataTurbine.
 *
 * @return true on clean completion, false when an I/O or RBNB error occurred
 *         (which prompts the caller to retry).
 */
protected boolean execute() {
    logger.debug("SBE37Source.execute() called.");
    // do not execute the stream if there is no connection
    if (!isConnected())
        return false;
    boolean failed = false;
    // while data are being sent, read them into the buffer
    try {
        this.socketChannel = getSocketConnection();
        // create four byte placeholders used to evaluate up to a four-byte
        // window. The FIFO layout looks like:
        // -------------------------
        // in ---> | One | Two |Three|Four | ---> out
        // -------------------------
        byte byteOne = 0x00, // set initial placeholder values
                byteTwo = 0x00, byteThree = 0x00, byteFour = 0x00;
        // Create a buffer that will store the sample bytes as they are read
        ByteBuffer sampleBuffer = ByteBuffer.allocate(getBufferSize());
        // create a byte buffer to store bytes from the TCP stream
        ByteBuffer buffer = ByteBuffer.allocateDirect(getBufferSize());
        // create a character string to store characters from the TCP stream
        StringBuilder responseString = new StringBuilder();
        // add a channel of data that will be pushed to the server.
        // Each sample will be sent to the Data Turbine as an rbnb frame.
        // NOTE: the channel index is registered once and reused for every sample.
        ChannelMap rbnbChannelMap = new ChannelMap();
        int channelIndex = rbnbChannelMap.Add(getRBNBChannelName());
        // wake the instrument with an initial take sample command
        this.command = this.commandPrefix + getInstrumentID() + "TS" + this.commandSuffix;
        this.sentCommand = queryInstrument(this.command);
        // verify the instrument ID is correct; keep polling until it is parsed
        while (getInstrumentID() == null) {
            // allow time for the instrument response
            streamingThread.sleep(2000);
            buffer.clear();
            // send the command and update the sentCommand status
            this.sentCommand = queryInstrument(this.command);
            // read the response into the buffer. Note that the streamed bytes
            // are 8-bit, not 16-bit Unicode characters. Use the US-ASCII
            // encoding instead.
            while (this.socketChannel.read(buffer) != -1 || buffer.position() > 0) {
                buffer.flip();
                while (buffer.hasRemaining()) {
                    String nextCharacter = new String(new byte[] { buffer.get() }, "US-ASCII");
                    responseString.append(nextCharacter);
                }
                // look for the command line ending ("S>" prompt)
                if (responseString.toString().indexOf("S>") > 0) {
                    // parse the ID from the idCommand response: the two
                    // characters following "=" plus one.
                    int idStartIndex = responseString.indexOf("=") + 2;
                    int idStopIndex = responseString.indexOf("=") + 4;
                    String idString = responseString.substring(idStartIndex, idStopIndex);
                    // test that the ID is a valid number and set the instrument ID
                    // NOTE(review): a non-numeric idString would make new Integer()
                    // throw an uncaught NumberFormatException here — confirm the
                    // instrument response format guarantees digits.
                    if ((new Integer(idString)).intValue() > 0) {
                        setInstrumentID(idString);
                        buffer.clear();
                        logger.debug("Instrument ID is " + getInstrumentID() + ".");
                        break;
                    } else {
                        logger.debug("Instrument ID \"" + idString + "\" was not set.");
                    }
                } else {
                    break;
                }
                buffer.compact();
                if (getInstrumentID() != null) {
                    break;
                }
            }
        } // instrumentID is set
        // allow time for the instrument response
        streamingThread.sleep(5000);
        this.command = this.commandPrefix + getInstrumentID() + this.takeSampleCommand + this.commandSuffix;
        this.sentCommand = queryInstrument(command);
        // while there are bytes to read from the socket ...
        while (this.socketChannel.read(buffer) != -1 || buffer.position() > 0) {
            // prepare the buffer for reading
            buffer.flip();
            // while there are unread bytes in the ByteBuffer
            while (buffer.hasRemaining()) {
                byteOne = buffer.get();
                //logger.debug("b1: " + new String(Hex.encodeHex((new byte[]{byteOne}))) + "\t" +
                //             "b2: " + new String(Hex.encodeHex((new byte[]{byteTwo}))) + "\t" +
                //             "b3: " + new String(Hex.encodeHex((new byte[]{byteThree}))) + "\t" +
                //             "b4: " + new String(Hex.encodeHex((new byte[]{byteFour}))) + "\t" +
                //             "sample pos: " + sampleBuffer.position() + "\t" +
                //             "sample rem: " + sampleBuffer.remaining() + "\t" +
                //             "sample cnt: " + sampleByteCount + "\t" +
                //             "buffer pos: " + buffer.position() + "\t" +
                //             "buffer rem: " + buffer.remaining() + "\t" +
                //             "state: " + state
                //);
                // Use a State Machine to process the byte stream.
                // Start building an rbnb frame for the entire sample, first by
                // inserting a timestamp into the channelMap. This time is merely
                // the time of insert into the data turbine, not the time of
                // observations of the measurements. That time should be parsed out
                // of the sample in the Sink client code
                switch (state) {
                case 0: // sample line is begun by S>
                    // note bytes are in reverse order in the FIFO window
                    if (byteOne == 0x3E && byteTwo == 0x53) {
                        // we've found the beginning of a sample, move on
                        state = 1;
                        break;
                    } else {
                        break;
                    }
                case 1: // read the rest of the bytes to the next EOL characters
                    // sample line is terminated by S>
                    // note bytes are in reverse order in the FIFO window
                    if (byteOne == 0x3E && byteTwo == 0x53) {
                        sampleByteCount++; // add the last byte found to the count
                        // add the last byte found to the sample buffer
                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);
                        } else {
                            sampleBuffer.compact();
                            sampleBuffer.put(byteOne);
                        }
                        // extract just the length of the sample bytes (less 2 bytes
                        // to exclude the 'S>' prompt characters) out of the
                        // sample buffer, and place it in the channel map as a
                        // byte array. Then, send it to the data turbine.
                        byte[] sampleArray = new byte[sampleByteCount - 2];
                        sampleBuffer.flip();
                        sampleBuffer.get(sampleArray);
                        // send the sample to the data turbine
                        rbnbChannelMap.PutTimeAuto("server");
                        String sampleString = new String(sampleArray, "US-ASCII");
                        rbnbChannelMap.PutMime(channelIndex, "text/plain");
                        rbnbChannelMap.PutDataAsString(channelIndex, sampleString);
                        getSource().Flush(rbnbChannelMap);
                        logger.info("Sample: " + sampleString);
                        logger.info("flushed data to the DataTurbine. ");
                        // reset the FIFO window and sample buffer for the next sample
                        byteOne = 0x00;
                        byteTwo = 0x00;
                        byteThree = 0x00;
                        byteFour = 0x00;
                        sampleBuffer.clear();
                        sampleByteCount = 0;
                        //rbnbChannelMap.Clear();
                        //logger.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. Cleared rbnbChannelMap.");
                        //state = 0;
                        // Once the sample is flushed, take a new sample
                        if (getInstrumentID() != null) {
                            // allow time for the instrument response
                            streamingThread.sleep(2000);
                            this.command = this.commandPrefix + getInstrumentID() + this.takeSampleCommand
                                    + this.commandSuffix;
                            this.sentCommand = queryInstrument(command);
                        }
                        // no break here: falls out of the switch (case 1 is the
                        // last case, so behavior is unchanged)
                    } else { // not 0x0A0D
                        // still in the middle of the sample, keep adding bytes
                        sampleByteCount++; // add each byte found
                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);
                        } else {
                            sampleBuffer.compact();
                            logger.debug("Compacting sampleBuffer ...");
                            sampleBuffer.put(byteOne);
                        }
                        break;
                    } // end if for 0x0A0D EOL
                } // end switch statement
                // shift the bytes in the FIFO window
                byteFour = byteThree;
                byteThree = byteTwo;
                byteTwo = byteOne;
            } //end while (more unread bytes)
            // prepare the buffer to read in more bytes from the stream
            buffer.compact();
        } // end while (more socket bytes to read)
        this.socketChannel.close();
    } catch (IOException e) { // handle exceptions
        // In the event of an i/o exception, log the exception, and allow execute()
        // to return false, which will prompt a retry.
        failed = true;
        e.printStackTrace();
        return !failed;
    } catch (SAPIException sapie) {
        // In the event of an RBNB communication exception, log the exception,
        // and allow execute() to return false, which will prompt a retry.
        failed = true;
        sapie.printStackTrace();
        return !failed;
    } catch (java.lang.InterruptedException ie) {
        // Interrupted sleeps are logged but not treated as failure.
        ie.printStackTrace();
    }
    return !failed;
}
From source file:com.panet.imeta.trans.steps.csvinput.CsvInput.java
private boolean openNextFile() throws KettleException { try {//from w ww .j av a 2s .c om // Close the previous file... // if (data.fc != null) { data.fc.close(); } if (data.fis != null) { data.fis.close(); } if (data.filenr >= data.filenames.length) { return false; } // Open the next one... // FileObject fileObject = KettleVFS.getFileObject(data.filenames[data.filenr]); if (!(fileObject instanceof LocalFile)) { // We can only use NIO on local files at the moment, so that's what we limit ourselves to. // throw new KettleException(Messages.getString("CsvInput.Log.OnlyLocalFilesAreSupported")); } if (meta.isLazyConversionActive()) { data.binaryFilename = data.filenames[data.filenr].getBytes(); } data.fis = (FileInputStream) ((LocalFile) fileObject).getInputStream(); data.fc = data.fis.getChannel(); data.bb = ByteBuffer.allocateDirect(data.preferredBufferSize); // If we are running in parallel and we need to skip bytes in the first file, let's do so here. // if (data.parallel) { if (data.bytesToSkipInFirstFile > 0) { data.fc.position(data.bytesToSkipInFirstFile); // Now, we need to skip the first row, until the first CR that is. // readOneRow(false); } } // Add filename to result filenames ? if (meta.isAddResultFile()) { ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, fileObject, getTransMeta().getName(), toString()); resultFile.setComment("File was read by a Csv input step"); addResultFile(resultFile); } // Move to the next filename // data.filenr++; // See if we need to skip a row... // - If you have a header row checked and if you're not running in parallel // - If you're running in parallel, if a header row is checked, if you're at the beginning of a file // if (meta.isHeaderPresent()) { if ((!data.parallel) || // Standard flat file : skip header (data.parallel && data.bytesToSkipInFirstFile <= 0)) { readOneRow(false); // skip this row. 
logBasic(Messages.getString("CsvInput.Log.HeaderRowSkipped", data.filenames[data.filenr - 1])); } } // Reset the row number pointer... // data.rowNumber = 1L; // Don't skip again in the next file... // data.bytesToSkipInFirstFile = -1L; return true; } catch (Exception e) { throw new KettleException(e); } }
From source file:edu.hawaii.soest.kilonalu.flntu.FLNTUSource.java
/**
 * A method that executes the streaming of data from the source to the RBNB
 * server after all configuration of settings, connections to hosts, and
 * thread initiatizing occurs. This method contains the detailed code for
 * streaming the data and interpreting the stream.
 *
 * Runs a two-state FIFO-window state machine over the FLNTU's TCP stream:
 * sample sets are bracketed by "mvs 1\r\n" / "mvs 0\r\n", and each
 * "\r\n"-terminated line inside a set is flushed to the DataTurbine.
 *
 * @return true on clean completion, false when an I/O or RBNB error occurred
 *         (which prompts the caller to retry).
 */
protected boolean execute() {
    logger.debug("FLNTUSource.execute() called.");
    // do not execute the stream if there is no connection
    if (!isConnected())
        return false;
    boolean failed = false;
    SocketChannel socket = getSocketConnection();
    // while data are being sent, read them into the buffer
    try {
        // create four byte placeholders used to evaluate up to a four-byte
        // window. The FIFO layout looks like:
        // -------------------------
        // in ---> | One | Two |Three|Four | ---> out
        // -------------------------
        byte byteOne = 0x00, // set initial placeholder values
                byteTwo = 0x00, byteThree = 0x00, byteFour = 0x00;
        // Create a buffer that will store the sample bytes as they are read
        ByteBuffer sampleBuffer = ByteBuffer.allocate(getBufferSize());
        // create a byte buffer to store bytes from the TCP stream
        ByteBuffer buffer = ByteBuffer.allocateDirect(getBufferSize());
        // add a channel of data that will be pushed to the server.
        // Each sample will be sent to the Data Turbine as an rbnb frame.
        ChannelMap rbnbChannelMap = new ChannelMap();
        // while there are bytes to read from the socket ...
        while (socket.read(buffer) != -1 || buffer.position() > 0) {
            // prepare the buffer for reading
            buffer.flip();
            // while there are unread bytes in the ByteBuffer
            while (buffer.hasRemaining()) {
                byteOne = buffer.get();
                // per-byte trace of the FIFO window and buffer state
                logger.debug("char: " + (char) byteOne + "\t" + "b1: "
                        + new String(Hex.encodeHex((new byte[] { byteOne }))) + "\t" + "b2: "
                        + new String(Hex.encodeHex((new byte[] { byteTwo }))) + "\t" + "b3: "
                        + new String(Hex.encodeHex((new byte[] { byteThree }))) + "\t" + "b4: "
                        + new String(Hex.encodeHex((new byte[] { byteFour }))) + "\t" + "sample pos: "
                        + sampleBuffer.position() + "\t" + "sample rem: " + sampleBuffer.remaining() + "\t"
                        + "sample cnt: " + sampleByteCount + "\t" + "buffer pos: " + buffer.position() + "\t"
                        + "buffer rem: " + buffer.remaining() + "\t" + "state: " + state);
                // Use a State Machine to process the byte stream.
                switch (state) {
                case 0:
                    // sample sets begin with 'mvs 1\r\n' and end with 'mvs 0\r\n'. Find the
                    // beginning of the sample set using the 4-byte window (s 1\r\n)
                    // note bytes are in reverse order in the FIFO window
                    if (byteOne == 0x0A && byteTwo == 0x0D && byteThree == 0x31 && byteFour == 0x20) {
                        // we've found the beginning of a sample set, move on
                        state = 1;
                        break;
                    } else {
                        break;
                    }
                case 1: // read the rest of the bytes to the next EOL characters
                    // sample line is terminated by record delimiter byte (\r\n)
                    // note bytes are in reverse order in the FIFO window
                    if (byteOne == 0x0A && byteTwo == 0x0D && byteThree == 0x30 && byteFour == 0x20) {
                        // we've found the sample set ending, clear buffers and return
                        // to state 0 to wait for the next set
                        byteOne = 0x00;
                        byteTwo = 0x00;
                        byteThree = 0x00;
                        byteFour = 0x00;
                        sampleBuffer.clear();
                        sampleByteCount = 0;
                        rbnbChannelMap.Clear();
                        logger.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. Cleared rbnbChannelMap.");
                        state = 0;
                        // if we're not at the sample set end, look for individual samples
                    } else if (byteOne == 0x0A && byteTwo == 0x0D) {
                        // found the sample ending delimiter
                        // add in the sample delimiter to the sample buffer
                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);
                            sampleByteCount++;
                        } else {
                            sampleBuffer.compact();
                            logger.debug("Compacting sampleBuffer ...");
                            sampleBuffer.put(byteOne);
                            sampleByteCount++;
                        }
                        // extract just the length of the sample bytes out of the
                        // sample buffer, and place it in the channel map as a
                        // byte array. Then, send it to the data turbine.
                        byte[] sampleArray = new byte[sampleByteCount];
                        sampleBuffer.flip();
                        sampleBuffer.get(sampleArray);
                        // send the sample to the data turbine
                        rbnbChannelMap.PutTimeAuto("server");
                        String sampleString = new String(sampleArray, "US-ASCII");
                        int channelIndex = rbnbChannelMap.Add(getRBNBChannelName());
                        rbnbChannelMap.PutMime(channelIndex, "text/plain");
                        rbnbChannelMap.PutDataAsString(channelIndex, sampleString);
                        getSource().Flush(rbnbChannelMap);
                        logger.info("Sample: " + sampleString.substring(0, sampleString.length() - 2)
                                + " sent data to the DataTurbine. ");
                        // reset the FIFO window and sample state for the next line
                        byteOne = 0x00;
                        byteTwo = 0x00;
                        byteThree = 0x00;
                        byteFour = 0x00;
                        sampleBuffer.clear();
                        sampleByteCount = 0;
                        rbnbChannelMap.Clear();
                        logger.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. Cleared rbnbChannelMap.");
                        break;
                    } else { // not 0x0
                        // still in the middle of the sample, keep adding bytes
                        sampleByteCount++; // add each byte found
                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);
                        } else {
                            sampleBuffer.compact();
                            logger.debug("Compacting sampleBuffer ...");
                            sampleBuffer.put(byteOne);
                        }
                        break;
                    } // end if for 0x0D20 EOL
                } // end switch statement
                // shift the bytes in the FIFO window
                byteFour = byteThree;
                byteThree = byteTwo;
                byteTwo = byteOne;
            } //end while (more unread bytes)
            // prepare the buffer to read in more bytes from the stream
            buffer.compact();
        } // end while (more socket bytes to read)
        // NOTE(review): socket is only closed on the success path; an exception
        // leaves it open — consider a finally block. Left unchanged here.
        socket.close();
    } catch (IOException e) { // handle exceptions
        // In the event of an i/o exception, log the exception, and allow execute()
        // to return false, which will prompt a retry.
        failed = true;
        e.printStackTrace();
        return !failed;
    } catch (SAPIException sapie) {
        // In the event of an RBNB communication exception, log the exception,
        // and allow execute() to return false, which will prompt a retry.
        failed = true;
        sapie.printStackTrace();
        return !failed;
    }
    return !failed;
}
From source file:eu.stratosphere.nephele.services.iomanager.IOManagerPerformanceBenchmark.java
/**
 * Benchmarks sequential int writes and reads through a FileChannel using a
 * single reusable ByteBuffer, logging elapsed wall-clock times.
 *
 * @param bufferSize size in bytes of the transfer buffer.
 * @param direct whether to use a direct or heap ByteBuffer.
 * @throws IOException on any file error, or when read-back verification fails.
 */
@SuppressWarnings("resource")
private final void speedTestNIO(int bufferSize, boolean direct) throws IOException {
    final Channel.ID tmpChannel = ioManager.createChannel();
    File tempFile = null;
    FileChannel fs = null;
    try {
        tempFile = new File(tmpChannel.getPath());
        RandomAccessFile raf = new RandomAccessFile(tempFile, "rw");
        fs = raf.getChannel();
        ByteBuffer buf = direct ? ByteBuffer.allocateDirect(bufferSize) : ByteBuffer.allocate(bufferSize);
        long writeStart = System.currentTimeMillis();
        // Write NUM_INTS_WRITTEN ints in descending order, flushing the buffer
        // whenever fewer than 4 bytes remain.
        int valsLeft = NUM_INTS_WRITTEN;
        while (valsLeft-- > 0) {
            if (buf.remaining() < 4) {
                buf.flip();
                fs.write(buf);
                buf.clear();
            }
            buf.putInt(valsLeft);
        }
        // Flush any bytes still pending in the buffer.
        if (buf.position() > 0) {
            buf.flip();
            fs.write(buf);
        }
        fs.close();
        raf.close();
        fs = null; // prevents a double close in the finally block
        long writeElapsed = System.currentTimeMillis() - writeStart;
        // ----------------------------------------------------------------
        // Read the file back and verify every int matches what was written.
        raf = new RandomAccessFile(tempFile, "r");
        fs = raf.getChannel();
        buf.clear();
        long readStart = System.currentTimeMillis();
        fs.read(buf);
        buf.flip();
        valsLeft = NUM_INTS_WRITTEN;
        while (valsLeft-- > 0) {
            // Refill when fewer than 4 readable bytes remain: compact keeps the
            // partial int, read appends, flip re-exposes the readable region.
            if (buf.remaining() < 4) {
                buf.compact();
                fs.read(buf);
                buf.flip();
            }
            if (buf.getInt() != valsLeft) {
                throw new IOException();
            }
        }
        fs.close();
        raf.close();
        long readElapsed = System.currentTimeMillis() - readStart;
        LOG.info("NIO Channel with buffer " + bufferSize + ": write " + writeElapsed + " msecs, read "
                + readElapsed + " msecs.");
    } finally {
        // close if possible
        if (fs != null) {
            fs.close();
            fs = null;
        }
        // try to delete the file
        if (tempFile != null) {
            tempFile.delete();
        }
    }
}
From source file:org.apache.sysml.runtime.matrix.data.SinglePrecisionCudaSupportFunctions.java
/**
 * Copies a host double[] to a device pointer holding single-precision data.
 * Either converts on the device (copy doubles up, run the double2float
 * kernel, free the staging buffer) or converts on the host into a direct
 * float buffer before a single memcpy, depending on
 * PERFORM_CONVERSION_ON_DEVICE.
 *
 * @param gCtx GPU context used for device allocation/free.
 * @param src host-side double data to transfer.
 * @param dest device pointer receiving float data (must hold src.length floats).
 * @param instName instruction name for fine-grained statistics (may be null).
 */
@Override
public void hostToDevice(GPUContext gCtx, double[] src, Pointer dest, String instName) {
    LOG.debug("Potential OOM: Allocated additional space in hostToDevice");
    // TODO: Perform conversion on GPU using double2float and float2double kernels
    long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
    if (PERFORM_CONVERSION_ON_DEVICE) {
        // Stage the doubles on the device, convert with a kernel, then free.
        Pointer deviceDoubleData = gCtx.allocate(((long) src.length) * Sizeof.DOUBLE);
        cudaMemcpy(deviceDoubleData, Pointer.to(src), ((long) src.length) * Sizeof.DOUBLE,
                cudaMemcpyHostToDevice);
        LibMatrixCUDA.double2float(gCtx, deviceDoubleData, dest, src.length);
        gCtx.cudaFreeHelper(instName, deviceDoubleData, DMLScript.EAGER_CUDA_FREE);
    } else {
        // Host-side conversion into a direct, native-order float buffer.
        // NOTE(review): Sizeof.FLOAT * src.length is computed in int arithmetic
        // here, while the memcpy sizes above widen to long — confirm src.length
        // is bounded such that this cannot overflow.
        FloatBuffer floatData = ByteBuffer.allocateDirect(Sizeof.FLOAT * src.length)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
        // Parallel absolute puts: each index is written exactly once.
        IntStream.range(0, src.length).parallel().forEach(i -> floatData.put(i, (float) src[i]));
        cudaMemcpy(dest, Pointer.to(floatData), ((long) src.length) * Sizeof.FLOAT, cudaMemcpyHostToDevice);
    }
    if (DMLScript.STATISTICS) {
        long totalTime = System.nanoTime() - t0;
        GPUStatistics.cudaDouble2FloatTime.add(totalTime);
        GPUStatistics.cudaDouble2FloatCount.add(1);
        if (DMLScript.FINEGRAINED_STATISTICS && instName != null)
            GPUStatistics.maintainCPMiscTimes(instName, GPUInstruction.MISC_TIMER_HOST_TO_DEVICE, totalTime);
    }
}
From source file:hivemall.mf.BPRMatrixFactorizationUDTF.java
/**
 * Hive UDTF initialization: validates the argument object inspectors,
 * processes options, sets up the factorized model and per-record probe
 * arrays, prepares a temp-file-backed segment buffer when running multiple
 * iterations at a task node, and declares the output row schema
 * (idx, Pu, Qi[, Bi]).
 *
 * @param argOIs inspectors for (user, posItem, negItem[, options]).
 * @return the struct inspector describing the forwarded rows.
 * @throws UDFArgumentException on wrong arity, bad argument types, or
 *         temp-file setup failure.
 */
@Override
public StructObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length != 3 && argOIs.length != 4) {
        throw new UDFArgumentException(getClass().getSimpleName()
                + " takes 3 or 4 arguments: INT user, INT posItem, INT negItem [, CONSTANT STRING options]");
    }
    this.userOI = HiveUtils.asIntCompatibleOI(argOIs[0]);
    this.posItemOI = HiveUtils.asIntCompatibleOI(argOIs[1]);
    this.negItemOI = HiveUtils.asIntCompatibleOI(argOIs[2]);
    processOptions(argOIs);
    // Fresh model and per-record scratch state for this task.
    this.model = new FactorizedModel(this, factor, rankInit);
    this.count = 0L;
    this.lastWritePos = 0L;
    this.uProbe = new float[factor];
    this.iProbe = new float[factor];
    this.jProbe = new float[factor];
    if (mapredContext != null && iterations > 1) {
        // invoke only at task node (initialize is also invoked in compilation)
        final File file;
        try {
            file = File.createTempFile("hivemall_bprmf", ".sgmt");
            file.deleteOnExit();
            if (!file.canWrite()) {
                throw new UDFArgumentException("Cannot write a temporary file: " + file.getAbsolutePath());
            }
        } catch (IOException ioe) {
            throw new UDFArgumentException(ioe);
        } catch (Throwable e) {
            throw new UDFArgumentException(e);
        }
        // Segment file buffers training records between iterations.
        this.fileIO = new NioFixedSegment(file, RECORD_BYTES, false);
        this.inputBuf = ByteBuffer.allocateDirect(65536); // 64 KiB
    }
    // Output schema: idx, Pu (float list), Qi (float list), optional Bi bias.
    ArrayList<String> fieldNames = new ArrayList<String>();
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    fieldNames.add("idx");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
    fieldNames.add("Pu");
    fieldOIs.add(ObjectInspectorFactory
            .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableFloatObjectInspector));
    fieldNames.add("Qi");
    fieldOIs.add(ObjectInspectorFactory
            .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableFloatObjectInspector));
    if (useBiasClause) {
        fieldNames.add("Bi");
        fieldOIs.add(PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}