Example usage for java.nio ByteBuffer clear

List of usage examples for java.nio ByteBuffer clear

Introduction

This page lists example usages of java.nio.ByteBuffer.clear(), collected from open source projects.

Prototype

public final Buffer clear() 

Document

Clears this buffer: the position is set to zero, the limit is set to the capacity, and the mark is discarded. Note that clear() does not erase the buffer's contents; it only resets the indices so the buffer can be filled again.
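
To make those semantics concrete, here is a minimal, self-contained sketch of what clear() does to a buffer's indices (this snippet is illustrative and not taken from any of the projects below):

import java.nio.ByteBuffer;

public class ClearDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(8);
        buf.putInt(42);                   // position = 4, limit = 8
        buf.flip();                       // position = 0, limit = 4: ready to read
        System.out.println(buf.getInt()); // 42

        buf.clear();                      // position = 0, limit = 8: ready to refill
        System.out.println(buf.position() + " " + buf.limit()); // prints "0 8"
        // clear() does not erase the bytes; they remain until overwritten.
    }
}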

Usage

From source file:hivemall.mf.OnlineMatrixFactorizationUDTF.java

protected final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer inputBuf = this.inputBuf;
    final NioFixedSegment fileIO = this.fileIO;
    assert (inputBuf != null);
    assert (fileIO != null);
    final long numTrainingExamples = count;

    final Reporter reporter = getReporter();
    final Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.mf.MatrixFactorization$Counter", "iteration");

    try {
        if (lastWritePos == 0) {// run iterations w/o temporary file
            if (inputBuf.position() == 0) {
                return; // no training example
            }
            inputBuf.flip();

            int iter = 2;
            for (; iter <= iterations; iter++) {
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (inputBuf.remaining() > 0) {
                    int user = inputBuf.getInt();
                    int item = inputBuf.getInt();
                    double rating = inputBuf.getDouble();
                    // invoke train
                    count++;
                    train(user, item, rating);
                }
                cvState.multiplyLoss(0.5d);
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                inputBuf.rewind();
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(count) + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example

            // write training examples in buffer to a temporary file
            if (inputBuf.position() > 0) {
                writeBuffer(inputBuf, fileIO, lastWritePos);
            } else if (lastWritePos == 0) {
                return; // no training example
            }
            try {
                fileIO.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + fileIO.getFile().getAbsolutePath(), e);
            }
            if (logger.isInfoEnabled()) {
                File tmpFile = fileIO.getFile();
                logger.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            for (; iter <= iterations; iter++) {
                setCounterValue(iterCounter, iter);

                inputBuf.clear();
                long seekPos = 0L;
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // writes training examples to a buffer in the temporary file
                    final int bytesRead;
                    try {
                        bytesRead = fileIO.read(seekPos, inputBuf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + fileIO.getFile().getAbsolutePath(),
                                e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;
                    seekPos += bytesRead;

                    // reads training examples from a buffer
                    inputBuf.flip();
                    int remain = inputBuf.remaining();
                    assert (remain > 0) : remain;
                    for (; remain >= RECORD_BYTES; remain -= RECORD_BYTES) {
                        int user = inputBuf.getInt();
                        int item = inputBuf.getInt();
                        double rating = inputBuf.getDouble();
                        // invoke train
                        count++;
                        train(user, item, rating);
                    }
                    inputBuf.compact();
                }
                cvState.multiplyLoss(0.5d);
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples using a secondary storage (thus " + NumberUtils.formatNumber(count)
                    + " training updates in total)");
        }
    } finally {
        // delete the temporary file and release resources
        try {
            fileIO.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + fileIO.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}
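
The method above shows the canonical clear/flip/compact cycle for replaying fixed-size records from secondary storage: clear() before each pass, flip() after each read so whole records can be drained, and compact() to carry any partial record into the next read. A condensed, hypothetical version of that loop follows (RecordLoop, readAll, and the plain channel argument are illustrative, not Hivemall code):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;

final class RecordLoop {
    static final int RECORD_BYTES = 4 + 4 + 8; // int user, int item, double rating

    static void readAll(ReadableByteChannel ch, ByteBuffer buf) throws IOException {
        buf.clear();                      // start a fresh pass over the data
        while (ch.read(buf) != -1) {
            buf.flip();                   // switch from filling to draining
            while (buf.remaining() >= RECORD_BYTES) {
                int user = buf.getInt();
                int item = buf.getInt();
                double rating = buf.getDouble();
                // train(user, item, rating);
            }
            buf.compact();                // keep any partial record for the next read
        }
    }
}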

From source file:cn.ac.ncic.mastiff.io.coding.RedBlackTreeStringReader.java

@Override
public byte[] ensureDecompressed() throws IOException {
    DataOutputBuffer transfer = new DataOutputBuffer();
    transfer.write(inBuf.getData(), 12, inBuf.getLength() - 12);
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(transfer.getData(), 0, transfer.getLength());
    int dictionarySize = dib.readInt();
    int length1 = dib.readInt();
    byte[] data = transfer.getData();
    transfer.close();
    dib.reset(data, 4 + 4, length1); // skip the two 4-byte int headers read above
    FlexibleEncoding.ORC.StreamName name = new FlexibleEncoding.ORC.StreamName(0,
            OrcProto.Stream.Kind.DICTIONARY_DATA);
    ByteBuffer inBuf1 = ByteBuffer.allocate(length1);
    inBuf1.put(dib.getData(), 0, dib.getLength());
    inBuf1.flip();
    InStream in = InStream.create("test1", inBuf1, null, dictionarySize);
    if (in.available() > 0) {
        dictionaryBuffer = new DynamicByteArray(64, in.available());
        dictionaryBuffer.readAll(in);
        in.close();
        // read the lengths (Google protobuf stream)
        name = new StreamName(1, OrcProto.Stream.Kind.LENGTH);
        dib.reset(data, 4 + 4 + length1, 4);
        int length2 = dib.readInt();
        dib.reset(data, 4 + 4 + length1 + 4, length2);
        //  in = streams.get(name);
        ByteBuffer inBuf2 = ByteBuffer.allocate(length2);
        inBuf2.put(dib.getData(), 0, length2);
        inBuf2.flip();
        in = InStream.create("test2", inBuf2, null, dictionarySize);
        //    IntegerReader lenReader = createIntegerReader(encodings.get(columnId)
        //        .getKind(), in, false);
        IntegerReader lenReader = createIntegerReader(OrcProto.ColumnEncoding.Kind.DIRECT_V2, in, false);
        int offset = 0;
        dictionaryOffsets = new int[dictionarySize + 1];
        for (int i = 0; i < dictionarySize; ++i) {
            dictionaryOffsets[i] = offset;
            offset += (int) lenReader.next();
        }
        dictionaryOffsets[dictionarySize] = offset;
        in.close();
        name = new FlexibleEncoding.ORC.StreamName(2, OrcProto.Stream.Kind.DATA);
        dib.reset(data, 4 + 4 + length1 + 4 + length2, 4);
        int length3 = dib.readInt();
        dib.reset(data, 4 + 4 + length1 + 4 + length2 + 4, length3);
        ByteBuffer inBuf3 = ByteBuffer.allocate(length3);
        inBuf3.put(dib.getData(), 0, length3);
        inBuf3.flip();
        in = InStream.create("test3", inBuf3, null, dictionarySize);
        reader = createIntegerReader(OrcProto.ColumnEncoding.Kind.DIRECT_V2, in, false);
    }
    inBuf.close();
    DataOutputBuffer decoding = new DataOutputBuffer();
    DataOutputBuffer offsets = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    int dataoffset = 12;
    String str;
    for (int i = 0; i < numPairs; i++) {
        str = readEachValue(null);
        decoding.writeUTF(str);
        //      if(i<5){
        //        System.out.println("304  bin[i]  "+str+"  decoding    "+ decoding.size());
        //      }
        dataoffset = decoding.size();
        offsets.writeInt(dataoffset);
    }
    System.out.println("315  offset.size() " + offsets.size() + "  decoding.szie   " + decoding.size());
    System.out.println("316  dataoffet   " + dataoffset);
    decoding.write(offsets.getData(), 0, offsets.size());
    inBuf.close();
    offsets.close();
    dib.close();
    System.out.println("316   decoding   " + decoding.size() + decoding.getLength() + " decoding.getData()   "
            + decoding.getData().length);
    inBuf1.clear();
    return decoding.getData();
}

From source file:org.neo4j.io.pagecache.PageCacheTest.java

@Test(timeout = SHORT_TIMEOUT_MILLIS)
public void evictionMustFlushPagesToTheRightFiles() throws IOException {
    generateFileWithRecords(file("a"), recordCount, recordSize);

    int filePageSize2 = filePageSize - 3; // diff. page size just to be difficult
    long maxPageIdCursor1 = recordCount / recordsPerFilePage;
    File file2 = file("b");
    OutputStream outputStream = fs.openAsOutputStream(file2, false);
    long file2sizeBytes = (maxPageIdCursor1 + 17) * filePageSize2;
    for (int i = 0; i < file2sizeBytes; i++) {
        // We will use the page cache to change these 'a's into 'b's.
        outputStream.write('a');
    }
    outputStream.flush();
    outputStream.close();

    getPageCache(fs, maxPages, pageCachePageSize, PageCacheTracer.NULL);

    PagedFile pagedFile1 = pageCache.map(file("a"), filePageSize);
    PagedFile pagedFile2 = pageCache.map(file2, filePageSize2);

    long pageId1 = 0;
    long pageId2 = 0;
    boolean moreWorkToDo;
    do {
        boolean cursorReady1;
        boolean cursorReady2;

        try (PageCursor cursor = pagedFile1.io(pageId1, PF_SHARED_WRITE_LOCK)) {
            cursorReady1 = cursor.next() && cursor.getCurrentPageId() < maxPageIdCursor1;
            if (cursorReady1) {
                writeRecords(cursor);
                pageId1++;
            }
        }

        try (PageCursor cursor = pagedFile2.io(pageId2, PF_SHARED_WRITE_LOCK | PF_NO_GROW)) {
            cursorReady2 = cursor.next();
            if (cursorReady2) {
                do {
                    for (int i = 0; i < filePageSize2; i++) {
                        cursor.putByte((byte) 'b');
                    }
                } while (cursor.shouldRetry());
            }
            pageId2++;
        }

        moreWorkToDo = cursorReady1 || cursorReady2;
    } while (moreWorkToDo);

    pagedFile1.close();
    pagedFile2.close();

    // Verify the file contents
    assertThat(fs.getFileSize(file2), is(file2sizeBytes));
    InputStream inputStream = fs.openAsInputStream(file2);
    for (int i = 0; i < file2sizeBytes; i++) {
        int b = inputStream.read();
        assertThat(b, is((int) 'b'));
    }
    assertThat(inputStream.read(), is(-1));
    inputStream.close();

    StoreChannel channel = fs.open(file("a"), "r");
    ByteBuffer bufA = ByteBuffer.allocate(recordSize);
    ByteBuffer bufB = ByteBuffer.allocate(recordSize);
    for (int i = 0; i < recordCount; i++) {
        bufA.clear();
        channel.read(bufA);
        bufA.flip();
        bufB.clear();
        generateRecordForId(i, bufB);
        assertThat(bufB.array(), byteArray(bufA.array()));
    }
}

From source file:com.inclouds.hbase.rowcache.RowCache.java

/**
 * CHECKED 2
 * 
 * Sets the family column. Format:
 * 
 * List of columns:
 *   4 bytes - total columns
 * Column:
 *   4 bytes - qualifier length
 *   qualifier
 *   4 bytes - total versions
 * List of versions:
 *   8 bytes - timestamp
 *   4 bytes - value length
 *   value
 * 
 * @param tableName
 *          the table name
 * @param family
 *          the family
 * @param bundle
 *          the bundle
 * @throws IOException
 *           Signals that an I/O exception has occurred.
 */
private void upsertFamilyColumns(byte[] tableName, byte[] row, byte[] family, List<KeyValue> bundle)
        throws IOException {
    if (bundle.size() == 0)
        return;
    // Get first
    ByteBuffer buf = getLocalByteBuffer();// bufTLS.get();

    try {
        prepareKeyForPut(buf, tableName, row, 0, row.length, family, null);

        // buffer position is at beginning of a value block
        int valueSize = Bytes.SIZEOF_INT;
        int numColumns = 0;
        int startPosition = buf.position();
        // Skip numColumns
        skip(buf, Bytes.SIZEOF_INT);
        while (bundle.size() > 0) {
            valueSize += addColumn(buf, bundle);
            numColumns++;
        }
        buf.putInt(startPosition, numColumns);
        // Update value len
        buf.putInt(4, valueSize);
        // Now we have K-V pair in a buffer - put it into cache
    } catch (BufferOverflowException e) {
        LOG.error("[row-cache] Ignore put op. The row:family is too large and exceeds the limit " + ioBufferSize
                + " bytes.");
        buf.clear();
        return;
    }
    doPut(buf);

}
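
The Javadoc above describes a length-prefixed value-block layout. As a rough illustration of how that layout maps onto ByteBuffer puts, here is a hypothetical writer for a single column with a single version (FamilyBlockWriter and writeOneColumn are illustrative names, not RowCache code):

import java.nio.ByteBuffer;

final class FamilyBlockWriter {
    static void writeOneColumn(ByteBuffer buf, byte[] qualifier, long timestamp, byte[] value) {
        buf.putInt(1);                  // 4 bytes - total columns
        buf.putInt(qualifier.length);   // 4 bytes - qualifier length
        buf.put(qualifier);             // qualifier
        buf.putInt(1);                  // 4 bytes - total versions
        buf.putLong(timestamp);         // 8 bytes - timestamp
        buf.putInt(value.length);       // 4 bytes - value length
        buf.put(value);                 // value
    }
}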

From source file:com.castis.sysComp.PoisConverterSysComp.java

private void writeClientUIFile(List<sceneDTO> list, String platform, File file) throws FileNotFoundException {

    FileOutputStream fos = null;
    String dir = filePolling.getValidFileDirectory(resultDir);

    String fileName = file.getName();
    String tempDir = dir + "/temp/";
    File targetDirectory = new File(CiFileUtil.getReplaceFullPath(tempDir));
    if (!targetDirectory.isDirectory()) {
        CiFileUtil.createDirectory(tempDir);
    }

    fos = new FileOutputStream(tempDir + fileName);
    int byteSize = 2048;
    ByteBuffer byteBuffer = ByteBuffer.allocateDirect(byteSize);
    GatheringByteChannel outByteCh = fos.getChannel();

    try {
        for (int i = 0; i < list.size(); i++) {
            sceneDTO scene = list.get(i);
            StringBuffer strBuffer = new StringBuffer();
            if (i == 0) {
                strBuffer.append("policy");
                strBuffer.append("|");
                strBuffer.append(platform);
                strBuffer.append("|");
                strBuffer.append("ClientUI");

                strBuffer.append("\r\n");
            }
            strBuffer.append("info");
            strBuffer.append("|");
            strBuffer.append(platform);
            strBuffer.append("|");
            strBuffer.append(scene.getId());
            strBuffer.append("|");
            strBuffer.append(scene.getName());
            strBuffer.append("|");
            strBuffer.append(scene.getTemplateFileName());
            strBuffer.append("|");
            strBuffer.append(scene.getMenuId());
            strBuffer.append("|");
            strBuffer.append(scene.getMenuName());
            strBuffer.append("|");
            strBuffer.append(scene.getSpaceId());
            strBuffer.append("|");
            strBuffer.append(scene.getSpaceName());
            strBuffer.append("|");
            strBuffer.append(scene.getResolution());
            strBuffer.append("|");
            strBuffer.append(scene.getResolutionOnFocus());
            strBuffer.append("|");
            strBuffer.append(scene.getSizeLimit());
            strBuffer.append("|");
            strBuffer.append(scene.getClickable());

            strBuffer.append("\r\n");

            byte[] outByte = null;
            try {
                outByte = strBuffer.toString().getBytes("UTF-8");
            } catch (UnsupportedEncodingException e2) {
                e2.printStackTrace();
            }
            byteBuffer.put(outByte);
            byteBuffer.flip();
            try {
                outByteCh.write(byteBuffer);
            } catch (IOException e) {
                log.error("Failed to write buffer to channel", e);
            }
            byteBuffer.clear();
        }

        fos.close();

        String targetDir = resultDir;
        File sourceFile = new File(tempDir + fileName);

        int index = fileName.indexOf("-");
        if (index != -1) {
            fileName = fileName.substring(index + 1, fileName.length());
        }
        index = fileName.indexOf("_");

        if (index != -1) {
            String directory = fileName.substring(0, index);
            targetDir += "/" + directory;
        }

        index = fileName.indexOf(".");
        if (index != -1) {
            fileName = fileName.substring(0, index) + ".csv";
        }

        try {

            File resultTargetDir = new File(CiFileUtil.getReplaceFullPath(targetDir));
            if (!resultTargetDir.isDirectory()) {
                CiFileUtil.createDirectory(targetDir);
            }

            CiFileUtil.renameFile(sourceFile, targetDir, fileName);
        } catch (Exception e) {
            log.error(e.getMessage());
        }

    } catch (Exception e) {
        String errorMsg = e.getMessage();
        log.error(errorMsg, e);
        throw new DataParsingException(errorMsg, e);

    }
}

From source file:libepg.ts.reader.Reader2.java

/**
 * Reads TS packets from the file.<br>
 * 1: Reads one byte at a time until a sync byte is found.<br>
 * 2: Once a sync byte is found, pushes it back and reads one full
 * packet's worth of bytes into the buffer.<br>
 * 3: Packets whose transport_error_indicator is set are discarded.<br>
 * 4: Stops early once the configured read limit is reached.<br>
 *
 * @return an unmodifiable list of the packets that were read
 */
public synchronized List<TsPacket> getPackets() {
    ByteBuffer packetBuffer = ByteBuffer.allocate(TsPacket.TS_PACKET_BYTE_LENGTH.PACKET_LENGTH.getByteLength());
    byte[] byteData = new byte[1];

    // packets read so far
    List<TsPacket> packets = new ArrayList<>();

    FileInputStream fis = null;
    PushbackInputStream pis = null;
    try {

        fis = new FileInputStream(this.TSFile);
        pis = new PushbackInputStream(fis);

        boolean tipOfPacket = false; // true while reading a packet body

        long count = 0;

        // read one byte at a time until EOF
        while (pis.read(byteData) != EOF) {

            // when a sync byte appears while not inside a packet, push it back and start reading a packet
            if ((byteData[0] == TsPacket.TS_SYNC_BYTE) && (tipOfPacket == false)) {
                tipOfPacket = true;
                if (LOG.isTraceEnabled() && NOT_DETERRENT_READ_TRACE_LOG) {
                    LOG.trace("Found a sync byte; pushing it back and reading one full packet.");
                }
                pis.unread(byteData);
            }

            if (tipOfPacket == true) {
                byte[] tsPacketData = new byte[TsPacket.TS_PACKET_BYTE_LENGTH.PACKET_LENGTH.getByteLength()];
                if (pis.read(tsPacketData) != EOF) {
                    if (LOG.isTraceEnabled() && NOT_DETERRENT_READ_TRACE_LOG) {
                        LOG.trace("Read one packet's worth of bytes into the buffer.");
                    }
                    packetBuffer.put(tsPacketData);
                } else {
                    break;
                }
            }

            if (packetBuffer.remaining() == 0) {
                byte[] BeforeCutDown = packetBuffer.array();
                byte[] AfterCutDown = new byte[packetBuffer.position()];
                System.arraycopy(BeforeCutDown, 0, AfterCutDown, 0, AfterCutDown.length);

                // build a TS packet from the accumulated bytes
                TsPacket tsp = new TsPacket(AfterCutDown);

                //                        LOG.debug(Hex.encodeHexString(tsp.getData()));
                if (LOG.isTraceEnabled() && NOT_DETERRENT_READ_TRACE_LOG) {
                    LOG.trace("Assembled one TS packet:");
                    LOG.trace(tsp.toString());
                }

                if (tsp.getTransport_error_indicator() != 0) {
                    if (LOG.isWarnEnabled()) {
                        LOG.warn("transport_error_indicator is set; discarding this packet.");
                        LOG.warn(tsp);
                        LOG.warn(TSFile);
                    }
                    tipOfPacket = false;
                } else {
                    packets.add(tsp);
                    count++;
                }
                packetBuffer.clear();
                tipOfPacket = false;

                if (this.readLimit != null && count >= this.readLimit) {
                    if (LOG.isInfoEnabled()) {
                        LOG.info("Reached the configured read limit; stopping. limit = " + this.readLimit);
                    }
                    break;
                }
            }

        }
        if (LOG.isTraceEnabled() && NOT_DETERRENT_READ_TRACE_LOG) {
            LOG.trace("?????????");
            LOG.trace(" = " + Hex.encodeHexString(packetBuffer.array()));
        }

        pis.close();
        fis.close();
        LOG.info("??? = " + count);

    } catch (FileNotFoundException e) {
        LOG.fatal("?????", e);
    } catch (IOException e) {
        LOG.fatal("???", e);
    }
    return Collections.unmodifiableList(packets);
}
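
The reader above illustrates a common recipe for framed binary streams: scan byte-by-byte for a sync marker, push it back, then bulk-read one frame. A condensed, hypothetical version of that loop, minus the error handling (FrameScanner and readFrames are illustrative names, not libepg code; 0x47 and 188 are the standard MPEG-TS sync byte and packet length):

import java.io.IOException;
import java.io.InputStream;
import java.io.PushbackInputStream;
import java.util.ArrayList;
import java.util.List;

final class FrameScanner {
    static final byte SYNC_BYTE = 0x47; // MPEG-TS sync byte
    static final int FRAME_LENGTH = 188; // MPEG-TS packet length

    static List<byte[]> readFrames(InputStream in) throws IOException {
        PushbackInputStream pis = new PushbackInputStream(in);
        List<byte[]> frames = new ArrayList<>();
        int b;
        while ((b = pis.read()) != -1) {
            if ((byte) b != SYNC_BYTE) {
                continue; // keep scanning until a sync byte shows up
            }
            pis.unread(b); // put the sync byte back, then bulk-read the frame
            byte[] frame = new byte[FRAME_LENGTH];
            int read = pis.read(frame);
            if (read < FRAME_LENGTH) {
                break; // truncated trailing frame; a production reader would loop to fill
            }
            frames.add(frame);
        }
        return frames;
    }
}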

From source file:org.mitre.math.linear.BufferRealMatrix.java

/**
 * Returns the result of postmultiplying this by m.
 *
 * @param b    matrix to postmultiply by
 * @return     this * b
 * @throws     IllegalArgumentException
 *             if columnDimension(this) != rowDimension(m)
 */
public BufferRealMatrix multiply(final BufferRealMatrix b) throws IllegalArgumentException {

    // safety check
    MatrixUtils.checkMultiplicationCompatible(this, b);

    try {
        final BufferRealMatrix c = new BufferRealMatrix(rows, b.columns, null);
        // allocate one row for our matrix
        final ByteBuffer abb = ByteBuffer.allocate(BLOCK_SIZE * DOUBLE_BYTE_SIZE);
        // for some funny reason we can't get an array, even if we wrap it before! So, allocate it here and use it later
        //  final double[] ar = new double[BLOCK_SIZE]; This isn't faster

        // perform multiplication block-wise, to ensure good cache behavior
        int blockIndex = 0;
        for (int iBlock = 0; iBlock < c.blockRows; ++iBlock) {
            final int pStart = iBlock * BLOCK_SIZE;
            final int pEnd = Math.min(pStart + BLOCK_SIZE, rows);
            //System.err.printf("pStart=%d\tpEnd=%d\tblockRows=%d\tblockColumns=%d\n", pStart, pEnd, c.blockRows, c.blockColumns);
            for (int jBlock = 0; jBlock < c.blockColumns; ++jBlock) {
                final int jWidth = BLOCK_SIZE; // square block no matter what
                final int jWidth2 = jWidth + jWidth;
                final int jWidth3 = jWidth2 + jWidth;
                final int jWidth4 = jWidth3 + jWidth;

                // select current product block
                DoubleBuffer cdb = c.dataFileChannel
                        .map(FileChannel.MapMode.READ_WRITE, c.getBlockOffset(blockIndex), BLOCK_BYTE_SIZE)
                        .asDoubleBuffer();
                cdb.clear();

                // perform multiplication on current block
                for (int kBlock = 0; kBlock < blockColumns; ++kBlock) {
                    //final int kWidth = blockWidth(kBlock);
                    final int kWidth = BLOCK_SIZE;

                    LOG.debug(String.format("Getting a block %d and b block %d", iBlock * blockColumns + kBlock,
                            kBlock * b.blockColumns + jBlock));

                    // walk down the blocks columns
                    DoubleBuffer bdb = b.dataFileChannel
                            .map(FileChannel.MapMode.READ_WRITE,
                                    b.getBlockOffset(kBlock * b.blockColumns + jBlock), BLOCK_BYTE_SIZE)
                            .asDoubleBuffer();
                    bdb.clear();

                    LOG.debug("Processing blocks");
                    for (int p = pStart, k = 0; p < pEnd; ++p) {
                        // a's width (# cols) is the same as b's height (# rows) and c's width
                        final int lStart = (p - pStart) * kWidth; // Square padded with zeros    
                        final int lEnd = blockWidth(kBlock); // Can stop at the last column in a's block
                        //System.err.printf("k=%d\tp=%d\tlstart=%d\tlend=%d\t\n", k, p, lStart, lEnd);
                        // For each row in a, multiple the columns in b
                        // Can stop at the last column in the c's block which should be the last column in b

                        // walk across A's blocks rows grabbing a row at a time
                        abb.clear();
                        this.dataFileChannel.position(this.getBlockOffset(iBlock * blockColumns + kBlock)
                                + (lStart * DOUBLE_BYTE_SIZE));
                        final int r = this.dataFileChannel.read(abb); // relative get into local bytebuffer
                        //System.err.printf("Got %d bytes (%d doubles) for %d block width\n", r, r / DOUBLE_BYTE_SIZE, kWidth);
                        if (r == -1) {
                            LOG.fatal("Unable to read in data");
                        }
                        abb.clear();
                        final DoubleBuffer adb = abb.asDoubleBuffer();
                        adb.clear();
                        // tried getting access to local copy (array) but it wasn't faster access

                        for (int nStart = 0; nStart < c.blockWidth(jBlock); ++nStart) {
                            double sum = 0;
                            int l = 0; // first column in this row
                            int n = nStart;
                            // do four at a time (why four?)
                            adb.position(l);

                            while (l < lEnd - 3) {
                                sum += adb.get() * bdb.get(n) + adb.get() * bdb.get(n + jWidth)
                                        + adb.get() * bdb.get(n + jWidth2) + adb.get() * bdb.get(n + jWidth3);
                                l += 4;
                                n += jWidth4;
                            }
                            while (l < lEnd) {
                                sum += adb.get() * bdb.get(n);
                                n += jWidth;
                                l++;
                            }
                            sum += cdb.get(k);
                            cdb.put(k++, sum);
                            //System.err.printf("k=%d\tn=%d\n", k, n);
                        }
                        // correct k for difference in blockWidth since we are always square
                        k = (p + 1) * BLOCK_SIZE;
                        //System.err.printf("end of p-loop (%d), k=%d\n", p, k);
                    }
                }
                this.dataFileChannel.force(false);
                System.err.printf("Finished block %d\n", blockIndex);
                // go to next block
                ++blockIndex;
            }
        }
        return c;
    } catch (IOException ex) {
        throw new IllegalArgumentException(ex.getMessage());
    }
}

From source file:hivemall.fm.FactorizationMachineUDTF.java

protected void runTrainingIteration(int iterations) throws HiveException {
    final ByteBuffer inputBuf = this._inputBuf;
    final NioStatefullSegment fileIO = this._fileIO;
    assert (inputBuf != null);
    assert (fileIO != null);
    final long numTrainingExamples = _t;
    final boolean adaregr = _va_rand != null;

    final Reporter reporter = getReporter();
    final Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.fm.FactorizationMachines$Counter", "iteration");

    try {
        if (fileIO.getPosition() == 0L) {// run iterations w/o temporary file
            if (inputBuf.position() == 0) {
                return; // no training example
            }
            inputBuf.flip();

            int iter = 2;
            for (; iter <= iterations; iter++) {
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (inputBuf.remaining() > 0) {
                    int bytes = inputBuf.getInt();
                    assert (bytes > 0) : bytes;
                    int xLength = inputBuf.getInt();
                    final Feature[] x = new Feature[xLength];
                    for (int j = 0; j < xLength; j++) {
                        x[j] = instantiateFeature(inputBuf);
                    }
                    double y = inputBuf.getDouble();
                    // invoke train
                    ++_t;
                    train(x, y, adaregr);
                }
                if (_cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                inputBuf.rewind();
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(_t) + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example

            // write training examples in buffer to a temporary file
            if (inputBuf.remaining() > 0) {
                writeBuffer(inputBuf, fileIO);
            }
            try {
                fileIO.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + fileIO.getFile().getAbsolutePath(), e);
            }
            if (LOG.isInfoEnabled()) {
                File tmpFile = fileIO.getFile();
                LOG.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            for (; iter <= iterations; iter++) {
                setCounterValue(iterCounter, iter);

                inputBuf.clear();
                fileIO.resetPosition();
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // writes training examples to a buffer in the temporary file
                    final int bytesRead;
                    try {
                        bytesRead = fileIO.read(inputBuf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + fileIO.getFile().getAbsolutePath(),
                                e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;

                    // reads training examples from a buffer
                    inputBuf.flip();
                    int remain = inputBuf.remaining();
                    if (remain < INT_BYTES) {
                        throw new HiveException("Illegal file format was detected");
                    }
                    while (remain >= INT_BYTES) {
                        int pos = inputBuf.position();
                        int recordBytes = inputBuf.getInt();
                        remain -= INT_BYTES;
                        if (remain < recordBytes) {
                            inputBuf.position(pos);
                            break;
                        }

                        final int xLength = inputBuf.getInt();
                        final Feature[] x = new Feature[xLength];
                        for (int j = 0; j < xLength; j++) {
                            x[j] = instantiateFeature(inputBuf);
                        }
                        double y = inputBuf.getDouble();

                        // invoke training
                        ++_t;
                        train(x, y, adaregr);

                        remain -= recordBytes;
                    }
                    inputBuf.compact();
                }
                if (_cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples on a secondary storage (thus " + NumberUtils.formatNumber(_t)
                    + " training updates in total)");
        }
    } finally {
        // delete the temporary file and release resources
        try {
            fileIO.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + fileIO.getFile().getAbsolutePath(), e);
        }
        this._inputBuf = null;
        this._fileIO = null;
    }
}

From source file:edu.hawaii.soest.pacioos.text.SocketTextSource.java

@Override
protected boolean execute() {

    log.debug("SocketTextSource.execute() called.");
    // do not execute the stream if there is no connection
    if (!isConnected())
        return false;

    boolean failed = false;

    /* Get a connection to the instrument */
    SocketChannel socket = getSocketConnection();
    if (socket == null)
        return false;

    // while data are being sent, read them into the buffer
    try {
        // create four byte placeholders used to evaluate up to a four-byte 
        // window.  The FIFO layout looks like:
        //           -------------------------
        //   in ---> | One | Two |Three|Four |  ---> out
        //           -------------------------
        byte byteOne = 0x00, // set initial placeholder values
                byteTwo = 0x00, byteThree = 0x00, byteFour = 0x00;

        // Create a buffer that will store the sample bytes as they are read
        ByteBuffer sampleBuffer = ByteBuffer.allocate(getBufferSize());

        // create a byte buffer to store bytes from the TCP stream
        ByteBuffer buffer = ByteBuffer.allocateDirect(getBufferSize());

        // while there are bytes to read from the socket ...
        while (socket.read(buffer) != -1 || buffer.position() > 0) {

            // prepare the buffer for reading
            buffer.flip();

            // while there are unread bytes in the ByteBuffer
            while (buffer.hasRemaining()) {
                byteOne = buffer.get();

                // log the byte stream
                String character = new String(new byte[] { byteOne });
                if (log.isDebugEnabled()) {
                    List<Byte> whitespaceBytes = new ArrayList<Byte>();
                    whitespaceBytes.add(new Byte((byte) 0x0A));
                    whitespaceBytes.add(new Byte((byte) 0x0D));
                    if (whitespaceBytes.contains(new Byte(byteOne))) {
                        character = new String(Hex.encodeHex((new byte[] { byteOne })));

                    }
                }
                log.debug("char: " + character + "\t" + "b1: "
                        + new String(Hex.encodeHex((new byte[] { byteOne }))) + "\t" + "b2: "
                        + new String(Hex.encodeHex((new byte[] { byteTwo }))) + "\t" + "b3: "
                        + new String(Hex.encodeHex((new byte[] { byteThree }))) + "\t" + "b4: "
                        + new String(Hex.encodeHex((new byte[] { byteFour }))) + "\t" + "sample pos: "
                        + sampleBuffer.position() + "\t" + "sample rem: " + sampleBuffer.remaining() + "\t"
                        + "sample cnt: " + sampleByteCount + "\t" + "buffer pos: " + buffer.position() + "\t"
                        + "buffer rem: " + buffer.remaining() + "\t" + "state: " + state);

                // evaluate each byte to find the record delimiter(s), and when found, validate and
                // send the sample to the DataTurbine.
                int numberOfChannelsFlushed = 0;

                if (getRecordDelimiters().length == 2) {
                    // have we hit the delimiters in the stream yet?
                    if (byteTwo == getFirstDelimiterByte() && byteOne == getSecondDelimiterByte()) {
                        sampleBuffer.put(byteOne);
                        sampleByteCount++;
                        // extract just the length of the sample bytes out of the
                        // sample buffer, and place it in the channel map as a 
                        // byte array.  Then, send it to the DataTurbine.
                        log.debug("Sample byte count: " + sampleByteCount);
                        byte[] sampleArray = new byte[sampleByteCount];
                        sampleBuffer.flip();
                        sampleBuffer.get(sampleArray);
                        String sampleString = new String(sampleArray, "US-ASCII");

                        if (validateSample(sampleString)) {
                            numberOfChannelsFlushed = sendSample(sampleString);

                        }

                        sampleBuffer.clear();
                        sampleByteCount = 0;
                        byteOne = 0x00;
                        byteTwo = 0x00;
                        byteThree = 0x00;
                        byteFour = 0x00;
                        log.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. Cleared rbnbChannelMap.");

                    } else {
                        // still in the middle of the sample, keep adding bytes
                        sampleByteCount++; // add each byte found

                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);

                        } else {
                            sampleBuffer.compact();
                            log.debug("Compacting sampleBuffer ...");
                            sampleBuffer.put(byteOne);

                        }

                    }

                } else if (getRecordDelimiters().length == 1) {
                    // have we hit the delimiter in the stream yet?
                    if (byteOne == getFirstDelimiterByte()) {
                        sampleBuffer.put(byteOne);
                        sampleByteCount++;
                        // extract just the length of the sample bytes out of the
                        // sample buffer, and place it in the channel map as a 
                        // byte array.  Then, send it to the DataTurbine.
                        byte[] sampleArray = new byte[sampleByteCount];
                        sampleBuffer.flip();
                        sampleBuffer.get(sampleArray);
                        String sampleString = new String(sampleArray, "US-ASCII");

                        if (validateSample(sampleString)) {
                            numberOfChannelsFlushed = sendSample(sampleString);

                        }

                        sampleBuffer.clear();
                        sampleByteCount = 0;
                        byteOne = 0x00;
                        byteTwo = 0x00;
                        byteThree = 0x00;
                        byteFour = 0x00;
                        log.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. Cleared rbnbChannelMap.");

                    } else {
                        // still in the middle of the sample, keep adding bytes
                        sampleByteCount++; // add each byte found

                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);

                        } else {
                            sampleBuffer.compact();
                            log.debug("Compacting sampleBuffer ...");
                            sampleBuffer.put(byteOne);

                        }

                    }

                } // end getRecordDelimiters().length

                // shift the bytes in the FIFO window
                byteFour = byteThree;
                byteThree = byteTwo;
                byteTwo = byteOne;

            } //end while (more unread bytes)

            // prepare the buffer to read in more bytes from the stream
            buffer.compact();

        } // end while (more socket bytes to read)
        socket.close();

    } catch (IOException e) {
        // handle exceptions
        // In the event of an i/o exception, log the exception, and allow execute()
        // to return false, which will prompt a retry.
        failed = true;
        log.error("There was a communication error in sending the data sample. The message was: "
                + e.getMessage());
        if (log.isDebugEnabled()) {
            e.printStackTrace();
        }
        return !failed;

    } catch (SAPIException sapie) {
        // In the event of an RBNB communication  exception, log the exception, 
        // and allow execute() to return false, which will prompt a retry.
        failed = true;
        log.error("There was an RBNB error while sending the data sample. The message was: "
                + sapie.getMessage());
        if (log.isDebugEnabled()) {
            sapie.printStackTrace();
        }
        return !failed;
    }

    return !failed;

}
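
The execute() method above accumulates bytes until a record delimiter arrives, then flips, copies, and clear()s the sample buffer for the next record. A stripped-down, hypothetical version of the single-delimiter branch (DelimiterScanner and onByte are illustrative names, not part of SocketTextSource; the real code also compacts the buffer when it fills):

import java.nio.ByteBuffer;

final class DelimiterScanner {
    private final ByteBuffer sample = ByteBuffer.allocate(1024);

    void onByte(byte b, byte delimiter) {
        sample.put(b);
        if (b == delimiter) {
            sample.flip();                         // drain the completed record
            byte[] record = new byte[sample.remaining()];
            sample.get(record);
            // handleRecord(record);
            sample.clear();                        // reset indices for the next record
        }
    }
}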

From source file:hivemall.mf.BPRMatrixFactorizationUDTF.java

private final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer inputBuf = this.inputBuf;
    final NioFixedSegment fileIO = this.fileIO;
    assert (inputBuf != null);
    assert (fileIO != null);
    final long numTrainingExamples = count;

    final Reporter reporter = getReporter();
    final Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.mf.BPRMatrixFactorization$Counter", "iteration");

    try {
        if (lastWritePos == 0) {// run iterations w/o temporary file
            if (inputBuf.position() == 0) {
                return; // no training example
            }
            inputBuf.flip();

            int iter = 2;
            for (; iter <= iterations; iter++) {
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (inputBuf.remaining() > 0) {
                    int u = inputBuf.getInt();
                    int i = inputBuf.getInt();
                    int j = inputBuf.getInt();
                    // invoke train
                    count++;
                    train(u, i, j);
                }
                cvState.multiplyLoss(0.5d);
                cvState.logState(iter, eta());
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                if (cvState.isLossIncreased()) {
                    etaEstimator.update(1.1f);
                } else {
                    etaEstimator.update(0.5f);
                }
                inputBuf.rewind();
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(count) + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example

            // write training examples in buffer to a temporary file
            if (inputBuf.position() > 0) {
                writeBuffer(inputBuf, fileIO, lastWritePos);
            } else if (lastWritePos == 0) {
                return; // no training example
            }
            try {
                fileIO.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + fileIO.getFile().getAbsolutePath(), e);
            }
            if (LOG.isInfoEnabled()) {
                File tmpFile = fileIO.getFile();
                LOG.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            for (; iter <= iterations; iter++) {
                setCounterValue(iterCounter, iter);

                inputBuf.clear();
                long seekPos = 0L;
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // writes training examples to a buffer in the temporary file
                    final int bytesRead;
                    try {
                        bytesRead = fileIO.read(seekPos, inputBuf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + fileIO.getFile().getAbsolutePath(),
                                e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;
                    seekPos += bytesRead;

                    // reads training examples from a buffer
                    inputBuf.flip();
                    int remain = inputBuf.remaining();
                    assert (remain > 0) : remain;
                    for (; remain >= RECORD_BYTES; remain -= RECORD_BYTES) {
                        int u = inputBuf.getInt();
                        int i = inputBuf.getInt();
                        int j = inputBuf.getInt();
                        // invoke train
                        count++;
                        train(u, i, j);
                    }
                    inputBuf.compact();
                }
                cvState.multiplyLoss(0.5d);
                cvState.logState(iter, eta());
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                if (cvState.isLossIncreased()) {
                    etaEstimator.update(1.1f);
                } else {
                    etaEstimator.update(0.5f);
                }
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples using a secondary storage (thus " + NumberUtils.formatNumber(count)
                    + " training updates in total)");
        }
    } finally {
        // delete the temporary file and release resources
        try {
            fileIO.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + fileIO.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}