Usage examples for java.nio.ByteBuffer.rewind()
public final Buffer rewind()
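ByteBuffer inherits this method from Buffer: it resets the position to zero and discards the mark, leaving the limit untouched, so the same contents can be read or written again. A minimal, self-contained sketch (not taken from the examples below) illustrating that behavior:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class RewindDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8));
        byte[] first = new byte[buf.remaining()];
        buf.get(first);           // position is now at the limit
        buf.rewind();             // position = 0, mark discarded, limit unchanged
        byte[] second = new byte[buf.remaining()];
        buf.get(second);          // reads the same five bytes again
        System.out.println(new String(second, StandardCharsets.UTF_8)); // prints "hello"
    }
}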
From source file:org.openhab.binding.irtrans.handler.EthernetBridgeHandler.java
protected void onWritable(ByteBuffer buffer) {
    lock.lock();
    try {
        synchronized (selector) {
            try {
                selector.selectNow();
            } catch (IOException e) {
                logger.error("An exception occurred while selecting: {}", e.getMessage());
            }
        }

        Iterator<SelectionKey> it = selector.selectedKeys().iterator();
        while (it.hasNext()) {
            SelectionKey selKey = it.next();
            it.remove();
            if (selKey.isValid() && selKey.isWritable()) {
                SocketChannel aSocketChannel = (SocketChannel) selKey.channel();
                if (aSocketChannel.equals(socketChannel)) {
                    boolean error = false;
                    buffer.rewind();
                    try {
                        logger.trace("Sending '{}' on the channel '{}'->'{}'", new String(buffer.array()),
                                aSocketChannel.getLocalAddress(), aSocketChannel.getRemoteAddress());
                        aSocketChannel.write(buffer);
                    } catch (NotYetConnectedException e) {
                        logger.warn("The channel '{}' is not yet connected: {}", aSocketChannel, e.getMessage());
                        if (!aSocketChannel.isConnectionPending()) {
                            error = true;
                        }
                    } catch (ClosedChannelException e) {
                        logger.warn("The channel for '{}' is closed: {}", aSocketChannel, e.getMessage());
                        error = true;
                    } catch (IOException e) {
                        // If some other I/O error occurs
                        logger.warn("An IO exception occurred on channel '{}': {}", aSocketChannel,
                                e.getMessage());
                        error = true;
                    }

                    if (error) {
                        try {
                            aSocketChannel.close();
                        } catch (IOException e) {
                            logger.warn("An exception occurred while closing the channel '{}': {}",
                                    aSocketChannel, e.getMessage());
                        }
                    }
                }
            }
        }
    } finally {
        lock.unlock();
    }
}
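The handler above rewinds the buffer before each write so the full payload is sent from position zero every time the channel becomes writable. Distilled to its core, the idiom looks like the sketch below; sendAll is a hypothetical helper, not part of the openHAB source:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.SocketChannel;

// Hypothetical helper: transmit the whole buffer from the start.
static void sendAll(SocketChannel channel, ByteBuffer payload) throws IOException {
    payload.rewind();                // position = 0 so the complete payload is written
    while (payload.hasRemaining()) { // a single write() may be partial on non-blocking channels
        channel.write(payload);
    }
}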
From source file:org.apache.james.protocols.imap.core.IMAPCommandDispatcher.java
@Override
protected Request parseRequest(IMAPSession session, ByteBuffer buffer) throws Exception {
    IMAPRequest request = new IMAPRequest(buffer);
    Matcher matcher = LITERAL_PATTERN.matcher(request.getArgument());
    if (matcher.matches()) {
        final long bytesToRead = Long.parseLong(matcher.group(1));
        MultiLineHandler<IMAPSession> handler = new MultiLineHandler<IMAPSession>() {

            private static final String BYTES_READ = "BYTES_READ";

            @Override
            public void init(Configuration config) throws ConfigurationException {
            }

            @Override
            public void destroy() {
            }

            /*
             * (non-Javadoc)
             * @see org.apache.james.protocols.api.handler.MultiLineHandler#isReady(org.apache.james.protocols.api.ProtocolSession, java.nio.ByteBuffer)
             */
            protected boolean isReady(IMAPSession session, ByteBuffer line) {
                long bytesRead = (Long) session.setAttachment(BYTES_READ, null, State.Transaction);
                bytesRead += line.remaining();
                if (bytesRead >= bytesToRead) {
                    return true;
                } else {
                    session.setAttachment(BYTES_READ, bytesRead, State.Transaction);
                    return false;
                }
            }

            @Override
            protected Response onLines(IMAPSession session, Collection<ByteBuffer> lines) {
                session.popLineHandler();
                return dispatchCommandHandlers(session, new IMAPRequest(lines));
            }
        };
        buffer.rewind();
        // push the line to the handler
        handler.onLine(session, buffer);
        session.pushLineHandler(handler);
        return null;
    } else {
        return request;
    }
}
From source file:com.yobidrive.diskmap.needles.NeedleManager.java
/**
 * Loads the needle pointed to by the needlePointer, checks it for validity (checksum, ...)
 * and returns the next linked needle.
 *
 * @param needlePointer
 * @return a chained needle if the read is successful, otherwise null
 * @throws NeedleManagerException
 */
public Needle getNeedleFromDisk(NeedlePointer needlePointer) throws NeedleManagerException {
    ByteBuffer needleBuffer = null;
    try {
        FileChannel fc = getChannel(needlePointer.getNeedleFileNumber());
        if (fc == null)
            return new Needle();
        // Position and read needle for check
        long position = needlePointer.getNeedleOffset();
        // Acquires a ByteBuffer
        if (threadBufferQ == null)
            return new Needle();
        Chrono chr = new Chrono();
        needleBuffer = threadBufferQ.take();
        chr.lap("Wait for thread buffer ", 20);
        // Finally we have a buffer
        needleBuffer.rewind();
        needleBuffer.limit(MAXKEYSIZE + MAXVERSIONSIZE + Needle.NEEDLEOVERHEAD);
        // First read header to know the data size
        int readBytes = 0, totalHeaderReadBytes = 0;
        while (readBytes >= 0 && totalHeaderReadBytes < needleBuffer.limit()) {
            readBytes = fc.read(needleBuffer, position + totalHeaderReadBytes);
            totalHeaderReadBytes += readBytes;
        }
        if (totalHeaderReadBytes <= 0)
            return new Needle();
        Needle needle = new Needle();
        if (!needle.getNeedleHeaderFromBuffer(needleBuffer)) {
            return new Needle(); // Incorrect header
        }
        // Needle header is OK, read the rest until end of needle. Change limit to include data
        // needleBuffer.rewind();
        needleBuffer.position(totalHeaderReadBytes);
        // needleBuffer.limit(needle.getPostDataSize());
        needleBuffer.limit(needle.getTotalSizeFromData());
        readBytes = 0;
        int totalContentReadBytes = 0;
        while (readBytes >= 0 && totalContentReadBytes < needleBuffer.limit() - totalHeaderReadBytes) {
            readBytes = fc.read(needleBuffer, position + totalHeaderReadBytes + totalContentReadBytes);
            totalContentReadBytes += readBytes;
        }
        // readBytes = fc.read(needleBuffer, position + needle.getHeaderSize());
        // Parse data and verify checksum
        // needleBuffer.rewind();
        needleBuffer.position(needle.getHeaderSize());
        if (!needle.getNeedleDataFromBuffer(needleBuffer))
            return new Needle();
        // Now needle is parsed and OK
        chr.total("Read from disk ", 20);
        return needle;
    } catch (Throwable th) {
        logger.error("Error reading needle at " + needlePointer.getFormattedNeedleFileNumber() + "/"
                + needlePointer.getFormattedNeedleOffset(), th);
        throw new NeedleManagerException();
    } finally {
        if (needleBuffer != null) {
            try {
                threadBufferQ.put(needleBuffer);
            } catch (InterruptedException ie) {
                throw new BucketTableManagerException("Error giving back needle read thread", ie);
            }
        }
    }
}
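Here rewind() prepares a pooled buffer for reuse: the buffer comes back from threadBufferQ with an arbitrary position, so it is rewound and its limit narrowed before the fixed-size header is read with absolute FileChannel reads. A reduced sketch of that preparation step; HEADER_SIZE and readHeader are illustrative names, not the NeedleManager API:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

static final int HEADER_SIZE = 64; // illustrative placeholder

static void readHeader(FileChannel fc, ByteBuffer pooled, long offset) throws IOException {
    pooled.rewind();            // reuse the pooled buffer from position 0
    pooled.limit(HEADER_SIZE);  // cap the read at the fixed header size
    int total = 0, n = 0;
    while (n >= 0 && total < pooled.limit()) {
        n = fc.read(pooled, offset + total); // absolute read; channel position is unchanged
        if (n > 0) {
            total += n;
        }
    }
}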
From source file:hivemall.recommend.SlimUDTF.java
private void runIterativeTraining() throws HiveException {
    final ByteBuffer buf = this._inputBuf;
    final NioStatefulSegment dst = this._fileIO;
    assert (buf != null);
    assert (dst != null);

    final Reporter reporter = getReporter();
    final Counters.Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.recommend.slim$Counter", "iteration");

    try {
        if (dst.getPosition() == 0L) { // run iterations w/o temporary file
            if (buf.position() == 0) {
                return; // no training example
            }
            buf.flip();
            for (int iter = 2; iter < numIterations; iter++) {
                _cvState.next();
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (buf.remaining() > 0) {
                    int recordBytes = buf.getInt();
                    assert (recordBytes > 0) : recordBytes;
                    replayTrain(buf);
                }
                buf.rewind();
                if (_cvState.isConverged(_observedTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + _cvState.getCurrentIteration() + " iterations of "
                    + NumberUtils.formatNumber(_observedTrainingExamples)
                    + " training examples on memory (thus "
                    + NumberUtils.formatNumber(_observedTrainingExamples * _cvState.getCurrentIteration())
                    + " training updates in total) ");
        } else { // read training examples in the temporary file and invoke train for each example
            // write KNNi in buffer to a temporary file
            if (buf.remaining() > 0) {
                writeBuffer(buf, dst);
            }
            try {
                dst.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + dst.getFile().getAbsolutePath(), e);
            }
            if (logger.isInfoEnabled()) {
                File tmpFile = dst.getFile();
                logger.info("Wrote KNN entries of axis items to a temporary file for iterative training: "
                        + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            for (int iter = 2; iter < numIterations; iter++) {
                _cvState.next();
                setCounterValue(iterCounter, iter);

                buf.clear();
                dst.resetPosition();
                while (true) {
                    reportProgress(reporter);
                    // load a KNNi to a buffer in the temporary file
                    final int bytesRead;
                    try {
                        bytesRead = dst.read(buf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + dst.getFile().getAbsolutePath(), e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;

                    // reads training examples from a buffer
                    buf.flip();
                    int remain = buf.remaining();
                    if (remain < SizeOf.INT) {
                        throw new HiveException("Illegal file format was detected");
                    }
                    while (remain >= SizeOf.INT) {
                        int pos = buf.position();
                        int recordBytes = buf.getInt();
                        remain -= SizeOf.INT;
                        if (remain < recordBytes) {
                            buf.position(pos);
                            break;
                        }
                        replayTrain(buf);
                        remain -= recordBytes;
                    }
                    buf.compact();
                }
                if (_cvState.isConverged(_observedTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + _cvState.getCurrentIteration() + " iterations of "
                    + NumberUtils.formatNumber(_observedTrainingExamples)
                    + " training examples on memory and KNNi data on secondary storage (thus "
                    + NumberUtils.formatNumber(_observedTrainingExamples * _cvState.getCurrentIteration())
                    + " training updates in total) ");
        }
    } catch (Throwable e) {
        throw new HiveException("Exception caused in the iterative training", e);
    } finally {
        // delete the temporary file and release resources
        try {
            dst.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + dst.getFile().getAbsolutePath(), e);
        }
        this._inputBuf = null;
        this._fileIO = null;
    }
}
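The in-memory branch above leans on rewind() to replay the same training records on every iteration: the buffer is flip()ped once, drained with relative gets, then rewound rather than refilled, because rewind() restores the position without touching the limit set by flip(). A condensed sketch of that replay loop, with the record layout simplified to one int per record:

import java.nio.ByteBuffer;

static void replay(ByteBuffer records, int iterations) {
    records.flip();                       // switch from writing to reading, once
    for (int iter = 0; iter < iterations; iter++) {
        while (records.remaining() > 0) {
            int value = records.getInt(); // consume one record
            // ... apply a training update with value ...
        }
        records.rewind();                 // position back to 0; limit is kept, so the data replays
    }
}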
From source file:edu.harvard.iq.dvn.ingest.dsb.impl.DvnNewJavaFieldCutter.java
public void cutColumns(InputStream in, int noCardsPerCase, int caseLength, String delimitor, String tabFileName)
        throws IOException {

    if (delimitor == null) {
        delimitor = defaultDelimitor;
    }

    OUT_LEN = colwidth; // calculated by parseList
    dbgLog.fine("out_len=" + OUT_LEN);

    String firstline = null;

    if (caseLength == 0) {
        int cread;
        int ccounter = 0;
        firstline = "";
        while (caseLength == 0 && (cread = in.read()) != -1) {
            ccounter++;
            if (cread == '\n') {
                caseLength = ccounter;
            }
            char c = (char) cread;
            firstline = firstline + c;
        }
    }

    if (caseLength == 0) {
        throw new IOException("Subsetting failed: could not read incoming byte stream. "
                + "(Requested file may be unavailable or missing)");
    }

    REC_LEN = caseLength;
    dbgLog.fine("REC_LEN=" + REC_LEN);

    for (int i = 0; i < cargSet.get(Long.valueOf(noCardsPerCase)).size(); i++) {
        int varEndOffset = cargSet.get(Long.valueOf(noCardsPerCase)).get(i).get(1);
        if (REC_LEN <= varEndOffset + 1) {
            throw new IOException("Failed to subset incoming byte stream. Invalid input. "
                    + "(Detected the first record of " + REC_LEN + " bytes; "
                    + "one of the columns requested ends at " + varEndOffset + " bytes).");
        }
    }

    Boolean dottednotation = false;
    Boolean foundData = false;

    // cutting a data file

    ReadableByteChannel rbc = Channels.newChannel(in);
    // input byte-buffer size = row-length + 1 (=> new line char)
    ByteBuffer inbuffer = ByteBuffer.allocate(REC_LEN);

    OutputStream outs = new FileOutputStream(tabFileName);
    WritableByteChannel outc = Channels.newChannel(outs);
    ByteBuffer outbuffer = null;

    int pos = 0;
    int offset = 0;
    int outoffset = 0;
    int begin = 0;
    int end = 0;
    int blankoffset = 0;
    int blanktail = 0;
    int k;

    try {
        // lc: line counter
        int lc = 0;
        while (firstline != null || rbc.read(inbuffer) != -1) {
            if (firstline != null) {
                // we have the first line saved as a String:
                inbuffer.put(firstline.getBytes());
                firstline = null;
            }

            // calculate i-th card number
            lc++;
            k = lc % noCardsPerCase;
            if (k == 0) {
                k = noCardsPerCase;
            }
            //out.println("***** " + lc + "-th line, record k=" + k + " *****");
            byte[] line_read = new byte[OUT_LEN];
            byte[] junk = new byte[REC_LEN];
            byte[] line_final = new byte[OUT_LEN];

            //out.println("READ: " + offset);
            inbuffer.rewind();

            offset = 0;
            outoffset = 0;

            // how many variables are cut from this k-th card
            int noColumns = cargSet.get(Long.valueOf(k)).size();

            //out.println("noColumns=" + noColumns);
            //out.println("cargSet k=" + cargSet.get(Long.valueOf(k)));

            for (int i = 0; i < noColumns; i++) {
                //out.println("**** " + i + "-th col ****");
                begin = cargSet.get(Long.valueOf(k)).get(i).get(0); // bounds[2 * i]
                end = cargSet.get(Long.valueOf(k)).get(i).get(1); // bounds[2 * i + 1]

                //out.println("i: begin: " + begin + "\ti: end:" + end);
                try {
                    // throw away offset bytes
                    if (begin - offset - 1 > 0) {
                        inbuffer.get(junk, 0, (begin - offset - 1));
                    }
                    // get requested bytes
                    inbuffer.get(line_read, outoffset, (end - begin + 1));
                    // set outbound data
                    outbounds[2 * i] = outoffset;
                    outbounds[2 * i + 1] = outoffset + (end - begin);
                    // current position moved to outoffset
                    pos = outoffset;

                    dottednotation = false;
                    foundData = false;

                    blankoffset = 0;
                    blanktail = 0;

                    // as position increases
                    while (pos <= (outoffset + (end - begin))) {
                        //out.println("pos=" + pos + "\tline_read[pos]=" + new String(line_read).replace("\000", "\052"));

                        // decimal octal
                        // 48 => '0'   60
                        // 46 => '.'   56
                        // 32 => space 40

                        // dot:
                        if (line_read[pos] == '\056') {
                            dottednotation = true;
                        }

                        // space:
                        if (line_read[pos] == '\040') {
                            if (foundData) {
                                blanktail = blanktail > 0 ? blanktail : pos - 1;
                            } else {
                                blankoffset = pos + 1;
                            }
                        } else {
                            foundData = true;
                            blanktail = 0;
                        }

                        pos++;
                    }
                    // increase the outoffset by width
                    outoffset += (end - begin + 1);

                    // dot false
                    if (!dottednotation) {
                        if (blankoffset > 0) {
                            // set outbound value to blankoffset
                            outbounds[2 * i] = blankoffset;
                        }
                        if (blanktail > 0) {
                            outbounds[2 * i + 1] = blanktail;
                        }
                    }

                } catch (BufferUnderflowException bufe) {
                    //bufe.printStackTrace();
                    throw new IOException(bufe.getMessage());
                }
                // set offset to the value of end-position
                offset = end;
            }

            outoffset = 0;
            // for each var
            for (int i = 0; i < noColumns; i++) {
                begin = outbounds[2 * i];
                end = outbounds[2 * i + 1];
                //out.println("begin=" + begin + "\t end=" + end);
                for (int j = begin; j <= end; j++) {
                    line_final[outoffset++] = line_read[j];
                }
                if (i < (noColumns - 1)) {
                    line_final[outoffset++] = '\011'; // tab x09
                } else {
                    if (k == cargSet.size()) {
                        line_final[outoffset++] = '\012'; // LF x0A
                    } else {
                        line_final[outoffset++] = '\011'; // tab x09
                    }
                }
            }
            //out.println("line_final=" + new String(line_final).replace("\000", "\052"));
            outbuffer = ByteBuffer.wrap(line_final, 0, outoffset);
            outc.write(outbuffer);
            inbuffer.clear();

        } // while loop

    } catch (IOException ex) {
        //ex.printStackTrace();
        throw new IOException("Failed to subset incoming fixed-field stream: " + ex.getMessage());
    }
}
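Each pass of the row loop above fills inbuffer from the channel, rewinds it so relative get()s start at byte zero of the row, slices the requested columns out, and clear()s it for the next row. A minimal sketch of that per-row cycle, assuming (as the original does) that each read() delivers a full row; copyRows and rowLen are illustrative names:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;

static void copyRows(ReadableByteChannel in, int rowLen) throws IOException {
    ByteBuffer row = ByteBuffer.allocate(rowLen);
    byte[] cols = new byte[rowLen];
    while (in.read(row) != -1) {
        row.rewind();             // back to the start of the freshly read row
        row.get(cols, 0, rowLen); // relative gets now walk the row from byte 0
        // ... select and write out the requested columns ...
        row.clear();              // ready for the next row
    }
}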
From source file:hivemall.mf.OnlineMatrixFactorizationUDTF.java
protected final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer inputBuf = this.inputBuf;
    final NioFixedSegment fileIO = this.fileIO;
    assert (inputBuf != null);
    assert (fileIO != null);
    final long numTrainingExamples = count;

    final Reporter reporter = getReporter();
    final Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.mf.MatrixFactorization$Counter", "iteration");

    try {
        if (lastWritePos == 0) { // run iterations w/o temporary file
            if (inputBuf.position() == 0) {
                return; // no training example
            }
            inputBuf.flip();

            int iter = 2;
            for (; iter <= iterations; iter++) {
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (inputBuf.remaining() > 0) {
                    int user = inputBuf.getInt();
                    int item = inputBuf.getInt();
                    double rating = inputBuf.getDouble();
                    // invoke train
                    count++;
                    train(user, item, rating);
                }
                cvState.multiplyLoss(0.5d);
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                inputBuf.rewind();
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples on memory (thus " + NumberUtils.formatNumber(count)
                    + " training updates in total) ");
        } else { // read training examples in the temporary file and invoke train for each example
            // write training examples in buffer to a temporary file
            if (inputBuf.position() > 0) {
                writeBuffer(inputBuf, fileIO, lastWritePos);
            } else if (lastWritePos == 0) {
                return; // no training example
            }
            try {
                fileIO.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + fileIO.getFile().getAbsolutePath(), e);
            }
            if (logger.isInfoEnabled()) {
                File tmpFile = fileIO.getFile();
                logger.info("Wrote " + numTrainingExamples
                        + " records to a temporary file for iterative training: "
                        + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            for (; iter <= iterations; iter++) {
                setCounterValue(iterCounter, iter);

                inputBuf.clear();
                long seekPos = 0L;
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // writes training examples to a buffer in the temporary file
                    final int bytesRead;
                    try {
                        bytesRead = fileIO.read(seekPos, inputBuf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: "
                                + fileIO.getFile().getAbsolutePath(), e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;
                    seekPos += bytesRead;

                    // reads training examples from a buffer
                    inputBuf.flip();
                    int remain = inputBuf.remaining();
                    assert (remain > 0) : remain;
                    for (; remain >= RECORD_BYTES; remain -= RECORD_BYTES) {
                        int user = inputBuf.getInt();
                        int item = inputBuf.getInt();
                        double rating = inputBuf.getDouble();
                        // invoke train
                        count++;
                        train(user, item, rating);
                    }
                    inputBuf.compact();
                }
                cvState.multiplyLoss(0.5d);
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples using a secondary storage (thus "
                    + NumberUtils.formatNumber(count) + " training updates in total)");
        }
    } finally {
        // delete the temporary file and release resources
        try {
            fileIO.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + fileIO.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}
From source file:ffx.crystal.CCP4MapWriter.java
/**
 * write data to file, does not normalize
 *
 * @param data map data to write out
 * @param norm should the data be normalized by mean/sd?
 */
public void write(double data[], boolean norm) {
    ByteOrder b = ByteOrder.nativeOrder();
    FileOutputStream fos;
    DataOutputStream dos;

    double min = Double.POSITIVE_INFINITY;
    double max = Double.NEGATIVE_INFINITY;
    double mean = 0.0;
    double sd = 0.0;

    int n = 0;
    for (int k = 0; k < extz; k++) {
        for (int j = 0; j < exty; j++) {
            for (int i = 0; i < extx; i++) {
                int index = stride * (i + extx * (j + exty * k));
                // int index = k * (exty * (extx + 2)) + j * (extx + 2) + i;
                n++;
                if (data[index] < min) {
                    min = data[index];
                }
                if (data[index] > max) {
                    max = data[index];
                }
                mean += (data[index] - mean) / n;
            }
        }
    }

    n = 0;
    for (int k = 0; k < extz; k++) {
        for (int j = 0; j < exty; j++) {
            for (int i = 0; i < extx; i++) {
                int index = stride * (i + extx * (j + exty * k));
                // int index = k * (exty * (extx + 2)) + j * (extx + 2) + i;
                sd += pow(data[index] - mean, 2.0);
                n++;
            }
        }
    }
    sd = sqrt(sd / n);

    if (norm) {
        for (int k = 0; k < extz; k++) {
            for (int j = 0; j < exty; j++) {
                for (int i = 0; i < extx; i++) {
                    int index = stride * (i + extx * (j + exty * k));
                    data[index] = (data[index] - mean) / sd;
                }
            }
        }
        // recurse
        write(data, false);
    }

    try {
        if (logger.isLoggable(Level.INFO)) {
            StringBuilder sb = new StringBuilder();
            sb.append(String.format("\nwriting CCP4 map file: \"%s\"\n", filename));
            sb.append(String.format("map min: %g max: %g mean: %g standard dev.: %g", min, max, mean, sd));
            logger.info(sb.toString());
        }

        fos = new FileOutputStream(filename);
        dos = new DataOutputStream(fos);

        byte bytes[] = new byte[2048];
        int offset = 0;
        int imapdata;
        float fmapdata;
        String mapstr;

        // header
        ByteBuffer bb = ByteBuffer.wrap(bytes);
        bb.order(b).putInt(extx);
        bb.order(b).putInt(exty);
        bb.order(b).putInt(extz);

        // mode (2 = reals, only one we accept)
        bb.order(b).putInt(2);

        bb.order(b).putInt(orix);
        bb.order(b).putInt(oriy);
        bb.order(b).putInt(oriz);
        bb.order(b).putInt(nx);
        bb.order(b).putInt(ny);
        bb.order(b).putInt(nz);

        bb.order(b).putFloat((float) crystal.a);
        bb.order(b).putFloat((float) crystal.b);
        bb.order(b).putFloat((float) crystal.c);
        bb.order(b).putFloat((float) crystal.alpha);
        bb.order(b).putFloat((float) crystal.beta);
        bb.order(b).putFloat((float) crystal.gamma);

        bb.order(b).putInt(1);
        bb.order(b).putInt(2);
        bb.order(b).putInt(3);

        bb.order(b).putFloat((float) min);
        bb.order(b).putFloat((float) max);
        bb.order(b).putFloat((float) mean);

        bb.order(b).putInt(crystal.spaceGroup.number);
        // bb.order(b).putInt(1);

        // symmetry bytes - should set this up at some point
        // imapdata = swap ? ByteSwap.swap(320) : 320;
        bb.order(b).putInt(80);

        bb.order(b).putInt(0);

        for (int i = 0; i < 12; i++) {
            bb.order(b).putFloat(0.0f);
        }
        for (int i = 0; i < 15; i++) {
            bb.order(b).putInt(0);
        }

        dos.write(bytes, offset, 208);
        bb.rewind();

        mapstr = "MAP ";
        dos.writeBytes(mapstr);

        // machine code: double, float, int, uchar
        // 0x4441 for LE, 0x1111 for BE
        if (ByteOrder.nativeOrder().equals(ByteOrder.LITTLE_ENDIAN)) {
            imapdata = 0x4441;
        } else {
            imapdata = 0x1111;
        }
        bb.order(b).putInt(imapdata);

        bb.order(b).putFloat((float) sd);

        bb.order(b).putInt(1);
        dos.write(bytes, offset, 12);

        StringBuilder sb = new StringBuilder();
        sb.append("map data from ffx");
        while (sb.length() < 80) {
            sb.append(" ");
        }
        dos.writeBytes(sb.toString());

        sb = new StringBuilder();
        while (sb.length() < 80) {
            sb.append(" ");
        }
        for (int i = 0; i < 9; i++) {
            dos.writeBytes(sb.toString());
        }

        sb = new StringBuilder();
        sb.append("x,y,z");
        while (sb.length() < 80) {
            sb.append(" ");
        }
        dos.writeBytes(sb.toString());

        bb.rewind();
        for (int k = 0; k < extz; k++) {
            for (int j = 0; j < exty; j++) {
                for (int i = 0; i < extx; i++) {
                    int index = stride * (i + extx * (j + exty * k));
                    // int index = k * (exty * (extx + 2)) + j * (extx + 2) + i;
                    fmapdata = (float) data[index];
                    bb.order(b).putFloat(fmapdata);
                    if (!bb.hasRemaining()) {
                        dos.write(bytes);
                        bb.rewind();
                    }
                }
            }
        }
        if (bb.position() > 0) {
            dos.write(bytes);
            bb.rewind();
        }

        dos.close();
    } catch (Exception e) {
        String message = "Fatal exception evaluating structure factors.\n";
        logger.log(Level.SEVERE, message, e);
        System.exit(-1);
    }
}
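The writer above wraps one byte[] in a ByteBuffer, fills a chunk of header fields, writes the backing array through a DataOutputStream, and rewinds so the next chunk reuses the same array from offset zero. A compact sketch of that reuse pattern, with placeholder field values:

import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

static void writeChunks(DataOutputStream dos) throws IOException {
    byte[] bytes = new byte[2048];
    ByteBuffer bb = ByteBuffer.wrap(bytes).order(ByteOrder.nativeOrder());
    bb.putInt(42);                      // first chunk of fields
    dos.write(bytes, 0, bb.position());
    bb.rewind();                        // reuse the same backing array from offset 0
    bb.putFloat(1.5f);                  // next chunk overwrites the previous contents
    dos.write(bytes, 0, bb.position());
}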
From source file:edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.por.PORFileReaderSpi.java
@Override
public boolean canDecodeInput(BufferedInputStream stream) throws IOException {
    if (stream == null) {
        throw new IllegalArgumentException("stream == null!");
    }
    dbgLog.fine("applying the por test\n");

    byte[] b = new byte[POR_HEADER_SIZE];

    if (stream.markSupported()) {
        stream.mark(0);
    }
    int nbytes = stream.read(b, 0, POR_HEADER_SIZE);
    //printHexDump(b, "hex dump of the byte-array");

    if (nbytes == 0) {
        throw new IOException();
    } else if (nbytes < 491) {
        // size test
        dbgLog.fine("this file is NOT spss-por type");
        return false;
    }

    if (stream.markSupported()) {
        stream.reset();
    }

    boolean DEBUG = false;

    // windows [0D0A]   => [1310] = [CR/LF]
    // unix    [0A]     => [10]
    // mac     [0D]     => [13]
    // 3-char  [0D0D0A] => [131310] spss for windows rel 15

    // expected results
    // unix case:    [0A]     : [80], [161], [242], [323], [404], [485]
    // windows case: [0D0A]   : [81], [163], [245], [327], [409], [491]
    //             : [0D0D0A] : [82], [165], [248], [331], [414], [495]

    // convert b into a ByteBuffer
    ByteBuffer buff = ByteBuffer.wrap(b);
    byte[] nlch = new byte[36];
    int pos1;
    int pos2;
    int pos3;
    int ucase = 0;
    int wcase = 0;
    int mcase = 0;
    int three = 0;
    int nolines = 6;
    int nocols = 80;
    for (int i = 0; i < nolines; ++i) {
        int baseBias = nocols * (i + 1);
        // 1-char case
        pos1 = baseBias + i;
        buff.position(pos1);
        dbgLog.finer("\tposition(1)=" + buff.position());
        int j = 6 * i;
        nlch[j] = buff.get();

        if (nlch[j] == 10) {
            ucase++;
        } else if (nlch[j] == 13) {
            mcase++;
        }

        // 2-char case
        pos2 = baseBias + 2 * i;
        buff.position(pos2);
        dbgLog.finer("\tposition(2)=" + buff.position());
        nlch[j + 1] = buff.get();
        nlch[j + 2] = buff.get();

        // 3-char case
        pos3 = baseBias + 3 * i;
        buff.position(pos3);
        dbgLog.finer("\tposition(3)=" + buff.position());
        nlch[j + 3] = buff.get();
        nlch[j + 4] = buff.get();
        nlch[j + 5] = buff.get();

        dbgLog.finer(i + "-th iteration position =" + nlch[j] + "\t" + nlch[j + 1] + "\t" + nlch[j + 2]);
        dbgLog.finer(i + "-th iteration position =" + nlch[j + 3] + "\t" + nlch[j + 4] + "\t" + nlch[j + 5]);

        if ((nlch[j + 3] == 13) && (nlch[j + 4] == 13) && (nlch[j + 5] == 10)) {
            three++;
        } else if ((nlch[j + 1] == 13) && (nlch[j + 2] == 10)) {
            wcase++;
        }

        buff.rewind();
    }

    if (three == nolines) {
        dbgLog.fine("0D0D0A case");
        windowsNewLine = false;
    } else if ((ucase == nolines) && (wcase < nolines)) {
        dbgLog.fine("0A case");
        windowsNewLine = false;
    } else if ((ucase < nolines) && (wcase == nolines)) {
        dbgLog.fine("0D0A case");
    } else if ((mcase == nolines) && (wcase < nolines)) {
        dbgLog.fine("0D case");
        windowsNewLine = false;
    }

    buff.rewind();
    int PORmarkPosition = POR_MARK_POSITION_DEFAULT;
    if (windowsNewLine) {
        PORmarkPosition = PORmarkPosition + 5;
    } else if (three == nolines) {
        PORmarkPosition = PORmarkPosition + 10;
    }

    byte[] pormark = new byte[8];
    buff.position(PORmarkPosition);
    buff.get(pormark, 0, 8);
    String pormarks = new String(pormark);

    //dbgLog.fine("pormark =>" + pormarks + "<-");
    dbgLog.fine("pormark[hex: 53 50 53 53 50 4F 52 54 == SPSSPORT] =>"
            + new String(Hex.encodeHex(pormark)) + "<-");

    if (pormarks.equals(POR_MARK)) {
        dbgLog.fine("this file is spss-por type");
        return true;
    } else {
        dbgLog.fine("this file is NOT spss-por type");
    }
    return false;
}
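The probe loop above jumps around the header with absolute position() calls and rewinds at the end of every pass, so each iteration (and the final SPSSPORT-marker read) starts from a known position. In miniature, with hypothetical names:

import java.nio.ByteBuffer;

static boolean byteAt(ByteBuffer buff, int offset, byte expected) {
    buff.position(offset); // jump to the candidate offset
    byte probe = buff.get();
    buff.rewind();         // reset so the next probe starts from position 0
    return probe == expected;
}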
From source file:edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.por.PORFileReaderSpi.java
@Override
public boolean canDecodeInput(Object source) throws IOException {
    // check for null before the instanceof test; the original order made the null check unreachable
    if (source == null) {
        throw new IllegalArgumentException("source == null!");
    }
    if (!(source instanceof BufferedInputStream)) {
        return false;
    }
    BufferedInputStream stream = (BufferedInputStream) source;
    dbgLog.fine("applying the por test\n");

    byte[] b = new byte[POR_HEADER_SIZE];

    if (stream.markSupported()) {
        stream.mark(0);
    }
    int nbytes = stream.read(b, 0, POR_HEADER_SIZE);
    //printHexDump(b, "hex dump of the byte-array");

    if (nbytes == 0) {
        throw new IOException();
    } else if (nbytes < 491) {
        // size test
        dbgLog.fine("this file is NOT spss-por type");
        return false;
    }

    if (stream.markSupported()) {
        stream.reset();
    }

    boolean DEBUG = false;

    // windows [0D0A]   => [1310] = [CR/LF]
    // unix    [0A]     => [10]
    // mac     [0D]     => [13]
    // 3-char  [0D0D0A] => [131310] spss for windows rel 15

    // expected results
    // unix case:    [0A]     : [80], [161], [242], [323], [404], [485]
    // windows case: [0D0A]   : [81], [163], [245], [327], [409], [491]
    //             : [0D0D0A] : [82], [165], [248], [331], [414], [495]

    // convert b into a ByteBuffer
    ByteBuffer buff = ByteBuffer.wrap(b);
    byte[] nlch = new byte[36];
    int pos1;
    int pos2;
    int pos3;
    int ucase = 0;
    int wcase = 0;
    int mcase = 0;
    int three = 0;
    int nolines = 6;
    int nocols = 80;
    for (int i = 0; i < nolines; ++i) {
        int baseBias = nocols * (i + 1);
        // 1-char case
        pos1 = baseBias + i;
        buff.position(pos1);
        dbgLog.finer("\tposition(1)=" + buff.position());
        int j = 6 * i;
        nlch[j] = buff.get();

        if (nlch[j] == 10) {
            ucase++;
        } else if (nlch[j] == 13) {
            mcase++;
        }

        // 2-char case
        pos2 = baseBias + 2 * i;
        buff.position(pos2);
        dbgLog.finer("\tposition(2)=" + buff.position());
        nlch[j + 1] = buff.get();
        nlch[j + 2] = buff.get();

        // 3-char case
        pos3 = baseBias + 3 * i;
        buff.position(pos3);
        dbgLog.finer("\tposition(3)=" + buff.position());
        nlch[j + 3] = buff.get();
        nlch[j + 4] = buff.get();
        nlch[j + 5] = buff.get();

        dbgLog.finer(i + "-th iteration position =" + nlch[j] + "\t" + nlch[j + 1] + "\t" + nlch[j + 2]);
        dbgLog.finer(i + "-th iteration position =" + nlch[j + 3] + "\t" + nlch[j + 4] + "\t" + nlch[j + 5]);

        if ((nlch[j + 3] == 13) && (nlch[j + 4] == 13) && (nlch[j + 5] == 10)) {
            three++;
        } else if ((nlch[j + 1] == 13) && (nlch[j + 2] == 10)) {
            wcase++;
        }

        buff.rewind();
    }

    if (three == nolines) {
        dbgLog.fine("0D0D0A case");
        windowsNewLine = false;
    } else if ((ucase == nolines) && (wcase < nolines)) {
        dbgLog.fine("0A case");
        windowsNewLine = false;
    } else if ((ucase < nolines) && (wcase == nolines)) {
        dbgLog.fine("0D0A case");
    } else if ((mcase == nolines) && (wcase < nolines)) {
        dbgLog.fine("0D case");
        windowsNewLine = false;
    }

    buff.rewind();
    int PORmarkPosition = POR_MARK_POSITION_DEFAULT;
    if (windowsNewLine) {
        PORmarkPosition = PORmarkPosition + 5;
    } else if (three == nolines) {
        PORmarkPosition = PORmarkPosition + 10;
    }

    byte[] pormark = new byte[8];
    buff.position(PORmarkPosition);
    buff.get(pormark, 0, 8);
    String pormarks = new String(pormark);

    dbgLog.fine("pormark[hex: 53 50 53 53 50 4F 52 54 == SPSSPORT] =>"
            + new String(Hex.encodeHex(pormark)) + "<-");

    if (pormarks.equals(POR_MARK)) {
        dbgLog.fine("this file is spss-por type");
        return true;
    } else {
        dbgLog.fine("this file is NOT spss-por type");
    }
    return false;
}
From source file:org.apache.hadoop.raid.JRSEncoder.java
protected void encodeStripe(InputStream[] blocks, long stripeStartOffset, long blockSize, byte[][] bufs,
        Progressable reporter) throws IOException {
    try {
        // trigger, pass file info
        for (int i = 0; i < threadNum; i++)
            fq[i].put(bufs);
    } catch (InterruptedException e) {
    }

    // seq number
    int s = 0;

    // number of bytes read so far
    int read = 0;

    // unused
    int cap = 1 + 11 * threadNum;
    //ByteBuffer[] buf = new ByteBuffer[cap];

    // use a buffer to pass data; could be replaced by Byte[]
    ByteBuffer buf;

    while (read < blockSize) {
        // indicates whether this is one of the last threadNum packets
        boolean important = false;

        // unused
        int idx = s % cap;
        //if (buf[idx] == null) buf[idx] = ByteBuffer.allocate(bufSize * stripeSize + 5);

        // initial buffer
        buf = ByteBuffer.allocate(bufSize * stripeSize + 64);

        //buf[idx].putInt(0, s);
        // seq number
        buf.putInt(stripeSize * bufSize, s);

        // check whether this is one of the last threadNum packets
        if ((blockSize - read + bufSize - 1) / bufSize <= threadNum) {
            important = true;
            //buf[idx].put(4, (byte) 1);
            buf.put(4 + stripeSize * bufSize, (byte) 1);
        } else {
            //buf[idx].put(4, (byte) 0);
            buf.put(4 + stripeSize * bufSize, (byte) 0);
        }

        byte[] bufarr = buf.array();
        LOG.info("anchor Encode_stripe " + s + " Data_start_reading " + System.nanoTime());
        for (int i = 0; i < stripeSize; i++) {
            try {
                //RaidUtils.readTillEnd(blocks[i], buf[idx].array(), true, 5 + i * bufSize, bufSize);
                // read data
                RaidUtils.readTillEnd(blocks[i], bufarr, true, i * bufSize, bufSize);
            } catch (IOException e) {
            }
        }
        //LOG.info(s + " read: " + bufarr[5] + " " + bufarr[5 + bufSize] + " " + bufarr[5 + bufSize * 2]);
        LOG.info("anchor Encode_stripe " + s + " Data_read " + System.nanoTime());

        //buf[idx].rewind();
        // update position
        buf.rewind();

        int remain = -1;
        int chosen = -1;
        // find the ring buffer with the most free capacity
        for (int i = 0; i < threadNum; i++) {
            int rc = q[i].remainingCapacity();
            if (remain < rc) {
                remain = rc;
                chosen = i;
            }
        }
        // decide which ring buffer receives the data
        if (important) {
            chosen = (((int) blockSize - read + bufSize - 1) / bufSize - 1) % threadNum;
        }
        //LOG.info("chosen number: " + chosen + " with seq: " + s + " and buf idx: " + idx);

        try {
            //out[chosen].put(buf[idx]);
            q[chosen].put(buf);
        } catch (InterruptedException e) {
        }
        LOG.info("anchor Encode_stripe " + s + " Data_pushed " + System.nanoTime());

        // update status
        s++;
        read += bufSize;
    }
}
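Here rewind() hands a freshly filled buffer to the consumer side: after the absolute puts and the raw array reads, the position is reset so the decoding threads pulling from the ring buffer see the packet from position zero. A minimal sketch of that producer step; produce and the queue are placeholders for the encoder's internals:

import java.nio.ByteBuffer;
import java.util.concurrent.BlockingQueue;

static void produce(BlockingQueue<ByteBuffer> queue, byte[] payload, int seq)
        throws InterruptedException {
    ByteBuffer buf = ByteBuffer.allocate(payload.length + Integer.BYTES);
    buf.putInt(seq);  // sequence number first
    buf.put(payload); // then the data block
    buf.rewind();     // consumer will read from position 0
    queue.put(buf);
}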