List of usage examples for java.nio ByteBuffer putInt
public abstract ByteBuffer putInt(int index, int value);
From source file:org.apache.hadoop.raid.PMDecoder.java
void writeFixedBlock(FSDataInputStream[] inputs, int[] erasedLocations, int[] validErasedLocations, long limit, byte[] outBuf, PMStreamFactory sf) throws IOException { int seq = 0;//from www . ja va2 s. c o m for (long read = 0; read < limit;) { int failNum = validErasedLocations.length; int bufOffset = encodedBufSize * (stripeSize + paritySize - failNum); ByteBuffer buf = ByteBuffer.allocate(bufOffset + 64); buf.putInt(bufOffset, seq); boolean important = false; //last threadNum# packet checked if ((limit - read + bufSize - 1) / bufSize <= threadNum) { important = true; buf.put(bufOffset + 4, (byte) 1); } else { buf.put(bufOffset + 4, (byte) 0); } LOG.info("anchor Decode_stripe " + seq + " Data_reading " + System.nanoTime()); //read packets buf.rewind(); validErasedLocations = readFromInputs(inputs, validErasedLocations, buf, sf, seq); LOG.info("anchor Decode_stripe " + seq + " Data_read " + System.nanoTime()); buf.rewind(); int toRead = (int) Math.min((long) bufSize, limit - read); //finding the best ring buffer int remain = -1; int chosen = -1; for (int i = 0; i < threadNum; i++) { int rc = q[i].remainingCapacity(); if (remain < rc) { remain = rc; chosen = i; } } if (important) { chosen = (int) (((limit - read + bufSize - 1) / bufSize - 1) % threadNum); } DecodePackage dp = (new DecodePackage(erasedLocations, validErasedLocations, buf)).limit(limit) .outputBuffer(outBuf); //dispatch boolean flag = true; while (flag) { flag = false; try { q[chosen].put(dp); } catch (InterruptedException e) { Thread.currentThread().interrupt(); flag = true; } } LOG.info("anchor Decode_stripe " + seq + " Data_pushed " + System.nanoTime()); seq++; read += toRead; } //waiting for the end of the decode for (int i = 0; i < threadNum; i++) { boolean flag = true; while (flag) { flag = false; try { p[i].take(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); flag = true; } } } }
From source file:org.apache.hadoop.raid.PMDecoder.java
void writeFixedBlock(FSDataInputStream[] inputs, int[] erasedLocations, int[] validErasedLocations, Map<Integer, LocatedBlock> corruptStripe, File[] lbfs, long[] limits, PMStreamFactory sf) throws IOException { long limit = 0; for (int i = 0; i < limits.length; i++) if (limit < limits[i]) limit = limits[i];//from w ww.j a v a 2 s. c om int seq = 0; for (long read = 0; read < limit;) { int failNum = validErasedLocations.length; int bufOffset = encodedBufSize * (stripeSize + paritySize - failNum); ByteBuffer buf = ByteBuffer.allocate(bufOffset + 64); buf.putInt(bufOffset, seq); //LOG.info("bufOffset: "+bufOffset+"encodedBufSize: "+encodedBufSize); boolean important = false; //last threadNum# packet checked if ((limit - read + bufSize - 1) / bufSize <= threadNum) { important = true; buf.put(bufOffset + 4, (byte) 1); } else { buf.put(bufOffset + 4, (byte) 0); } LOG.info("anchor Decode_stripe " + seq + " Data_reading " + System.nanoTime()); //read packets buf.rewind(); validErasedLocations = readFromInputs(inputs, validErasedLocations, buf, sf, seq); LOG.info("anchor Decode_stripe " + seq + " Data_read " + System.nanoTime()); int toRead = (int) Math.min((long) bufSize, limit - read); buf.rewind(); //finding the best ring buffer int remain = -1; int chosen = -1; for (int i = 0; i < threadNum; i++) { int rc = q[i].remainingCapacity(); if (remain < rc) { remain = rc; chosen = i; } } if (important) { chosen = (int) ((((limit - read) + bufSize - 1) / bufSize - 1) % threadNum); } DecodePackage dp = (new DecodePackage(erasedLocations, validErasedLocations, buf)).limits(limits) .localFiles(lbfs); //dispatch boolean flag = true; while (flag) { flag = false; try { q[chosen].put(dp); } catch (InterruptedException e) { Thread.currentThread().interrupt(); flag = true; } } LOG.info("anchor Decode_stripe " + seq + " Data_pushed " + System.nanoTime()); seq++; read += toRead; } //waiting for the end of the decode for (int i = 0; i < threadNum; i++) { boolean flag = true; while (flag) { 
flag = false; try { p[i].take(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); flag = true; } } } }
From source file:org.apache.hadoop.raid.IADecoder.java
/**
 * Reads one encoded packet from every surviving input stream, rebuilding the
 * whole stream set whenever an input fails mid-read.
 *
 * On a read failure the failed index is appended to the erased set,
 * iaValidate recomputes the usable erasure configuration (presumably: temp[0]
 * is the count followed by the indices — TODO confirm against iaValidate),
 * all streams are reopened at the current packet offset, and the outer loop
 * restarts the read pass from scratch.
 *
 * @param inputs               one stream per stripe+parity location; may be
 *                             closed and rebuilt by this method
 * @param validErasedLocations currently known-erased indices, sorted ascending
 * @param sf                   factory used to close/rebuild the input streams
 * @param seq                  packet sequence number (used to seek on rebuild)
 * @return the (possibly enlarged) erased set plus the packet buffer: the
 *         surviving inputs' encoded data followed by 64 metadata bytes
 *         beginning with the sequence number
 * @throws IOException if the number of erasures reaches paritySize
 */
ReadPackage readFromInputs(FSDataInputStream[] inputs, int[] validErasedLocations, IAStreamFactory sf,
        int seq) throws IOException {
    boolean flag = true;
    while (flag) {
        flag = false;
        // For every input, read some data = bufSize
        for (int i = 0, j = 0; i < inputs.length; i++) {
            // j walks the sorted erased set; skip inputs known to be erased.
            if (j >= validErasedLocations.length || i != validErasedLocations[j]) {
                try {
                    LOG.info("read input:" + i + " encoded bs:" + encodedBufSize + " " + System.nanoTime());
                    RaidUtils.readTillEnd(inputs[i], readBufs[i], encodedBufSize, true);
                    continue; // read succeeded; next input
                } catch (BlockMissingException e) {
                    LOG.error("Encountered BlockMissingException in stream " + i);
                } catch (ChecksumException e) {
                    LOG.error("Encountered ChecksumException in stream " + i);
                }
            } else {
                j++;
                continue;
            }
            // Reaching here means input i just failed.
            // too many fails
            if (validErasedLocations.length == paritySize) {
                String msg = "Too many read errors";
                LOG.error(msg);
                throw new IOException(msg);
            }
            // read fail, need to rebuild the stream.
            // Append the newly failed index to the erased set.
            int[] newErasedLocations = new int[validErasedLocations.length + 1];
            for (int k = 0; k < validErasedLocations.length; k++) {
                newErasedLocations[k] = validErasedLocations[k];
            }
            newErasedLocations[newErasedLocations.length - 1] = i;
            int[] temp = new int[stripeSize + paritySize + 1];
            iaValidate(stripeSize, paritySize, newErasedLocations.length, newErasedLocations, temp);
            LOG.info("iaValidate pass 3");
            validErasedLocations = new int[temp[0]];
            // Encoded packet size scales with the number of erasures.
            encodedBufSize = bufSize * validErasedLocations.length / paritySize;
            System.arraycopy(temp, 1, validErasedLocations, 0, temp[0]);
            Arrays.sort(validErasedLocations);
            // Reopen every stream at the current packet boundary.
            // NOTE(review): seq * bufSize is int arithmetic — could overflow
            // for very large offsets; confirm expected block sizes.
            sf.closeStreams(inputs);
            sf.buildStream(inputs, validErasedLocations, seq * bufSize);
            //reset
            startTime = System.nanoTime();
            flag = true; // restart the whole read pass
            break;
        }
    }
    // Assemble the packet: surviving inputs' data plus 64 metadata bytes.
    int failNum = validErasedLocations.length;
    int bufOffset = encodedBufSize * (stripeSize + paritySize - failNum);
    ByteBuffer buf = ByteBuffer.allocate(bufOffset + 64);
    buf.putInt(bufOffset, seq);
    buf.rewind();
    LOG.info("end read encoded bs:" + encodedBufSize + " " + System.nanoTime());
    for (int i = 0, j = 0; i < inputs.length; i++)
        if (j >= validErasedLocations.length || i != validErasedLocations[j])
            buf.put(readBufs[i], 0, encodedBufSize);
        else
            j++;
    return new ReadPackage(validErasedLocations, buf);
}
From source file:edu.mbl.jif.imaging.mmtiff.MultipageTiffWriter.java
/**
 * Builds the 16-byte payload holding the two TIFF RATIONAL resolution values
 * (numerator/denominator for X, then the same pair for Y).
 */
private ByteBuffer getResolutionValuesBuffer() throws IOException {
    ByteBuffer rationals = ByteBuffer.allocate(16).order(BYTE_ORDER);
    // Both axes share the same resolution, so write the pair twice.
    for (int offset = 0; offset <= 8; offset += 8) {
        rationals.putInt(offset, (int) resNumerator_);
        rationals.putInt(offset + 4, (int) resDenomenator_);
    }
    return rationals;
}
From source file:edu.mbl.jif.imaging.mmtiff.MultipageTiffWriter.java
/**
 * Terminates the IFD chain: overwrites the pending next-IFD pointer with 0 so
 * readers stop after the last image.
 */
private void writeNullOffsetAfterLastImage() throws IOException {
    ByteBuffer zeroPointer = ByteBuffer.allocate(4).order(BYTE_ORDER);
    zeroPointer.putInt(0, 0);
    fileChannel_.write(zeroPointer, nextIFDOffsetLocation_);
}
From source file:edu.mbl.jif.imaging.mmtiff.MultipageTiffWriter.java
private void writeComments() throws IOException { //Write 4 byte header, 4 byte number of bytes JSONObject comments;/* ww w . j av a 2 s . c o m*/ try { comments = masterMPTiffStorage_.getDisplayAndComments().getJSONObject("Comments"); } catch (JSONException ex) { comments = new JSONObject(); } String commentsString = comments.toString(); ByteBuffer header = ByteBuffer.allocate(8).order(BYTE_ORDER); header.putInt(0, COMMENTS_HEADER); header.putInt(4, commentsString.length()); ByteBuffer buffer = ByteBuffer.wrap(getBytesFromString(commentsString)); fileChannel_.write(header, filePosition_); fileChannel_.write(buffer, filePosition_ + 8); ByteBuffer offsetHeader = ByteBuffer.allocate(8).order(BYTE_ORDER); offsetHeader.putInt(0, COMMENTS_OFFSET_HEADER); offsetHeader.putInt(4, (int) filePosition_); fileChannel_.write(offsetHeader, 24); filePosition_ += 8 + commentsString.length(); }
From source file:edu.mbl.jif.imaging.mmtiff.MultipageTiffWriter.java
private void writeImageDescription(String value, long imageDescriptionTagOffset) throws IOException { //write first image IFD ByteBuffer ifdCountAndValueBuffer = ByteBuffer.allocate(8).order(BYTE_ORDER); ifdCountAndValueBuffer.putInt(0, value.length()); ifdCountAndValueBuffer.putInt(4, (int) filePosition_); fileChannel_.write(ifdCountAndValueBuffer, imageDescriptionTagOffset + 4); //write String ByteBuffer buffer = ByteBuffer.wrap(getBytesFromString(value)); fileChannel_.write(buffer, filePosition_); filePosition_ += buffer.capacity();//from w w w . j a v a 2 s .co m }
From source file:edu.mbl.jif.imaging.mmtiff.MultipageTiffWriter.java
private void writeIndexMap() throws IOException { //Write 4 byte header, 4 byte number of entries, and 20 bytes for each entry int numMappings = indexMap_.size(); ByteBuffer buffer = ByteBuffer.allocate(8 + 20 * numMappings).order(BYTE_ORDER); buffer.putInt(0, INDEX_MAP_HEADER); buffer.putInt(4, numMappings);// www .j av a 2 s .com int position = 2; for (String label : indexMap_.keySet()) { String[] indecies = label.split("_"); for (String index : indecies) { buffer.putInt(4 * position, Integer.parseInt(index)); position++; } buffer.putInt(4 * position, indexMap_.get(label).intValue()); position++; } fileChannel_.write(buffer, filePosition_); ByteBuffer header = ByteBuffer.allocate(8).order(BYTE_ORDER); header.putInt(0, INDEX_MAP_OFFSET_HEADER); header.putInt(4, (int) filePosition_); fileChannel_.write(header, 8); filePosition_ += buffer.capacity(); }
From source file:com.codestation.henkakuserver.HenkakuServer.java
/** * Finalize the exploit with the addesses from the device * * @param exploit payload compiled code/*from w w w.j av a2 s .c om*/ * @param params list of addresses from the device * @return patched shellcode * @throws Exception */ private byte[] patchExploit(byte[] exploit, Map<String, String> params) throws Exception { if (params.size() != 7) { throw new Exception("invalid argument count"); } ArrayList<Long> args = new ArrayList<>(); args.add(0L); for (int i = 1; i <= 7; ++i) { String arg = String.format("a%s", i); if (params.containsKey(arg)) { args.add(Long.parseLong(params.get(arg), 16)); } else { throw new Exception(String.format("argument %s is missing", arg)); } } byte[] copy = new byte[exploit.length]; System.arraycopy(exploit, 0, copy, 0, exploit.length); ByteBuffer buf = ByteBuffer.wrap(copy).order(ByteOrder.LITTLE_ENDIAN); int size_words = buf.getInt(0); int dsize = buf.getInt(4 + 0x10); int csize = buf.getInt(4 + 0x20); long data_base = args.get(1) + csize; for (int i = 1; i < size_words; ++i) { long add = 0; byte x = buf.get(size_words * 4 + 4 + i - 1); if (x == 1) { add = data_base; } else if (x != 0) { add = args.get(x); } buf.putInt(i * 4, buf.getInt(i * 4) + (int) add); } byte[] out = new byte[dsize + csize]; System.arraycopy(copy, 4 + 0x40, out, csize, dsize); System.arraycopy(copy, 4 + 0x40 + dsize, out, 0, csize); return out; }
From source file:edu.mbl.jif.imaging.mmtiff.MultipageTiffWriter.java
/**
 * Writes the channel display settings into a reserved fixed-size region at
 * the current file position and records a pointer to it at file offset 16.
 * The region is sized by channel count so the settings can be rewritten in
 * place later without moving anything else.
 */
private void writeDisplaySettings() throws IOException {
    JSONArray displaySettings;
    try {
        displaySettings = masterMPTiffStorage_.getDisplayAndComments().getJSONArray("Channels");
    } catch (JSONException ex) {
        displaySettings = new JSONArray(); // nothing stored yet; write empty array
    }
    int numReservedBytes = numChannels_ * DISPLAY_SETTINGS_BYTES_PER_CHANNEL;
    // NOTE(review): no check that the JSON fits inside numReservedBytes —
    // presumably guaranteed by the per-channel byte budget; confirm upstream.
    ByteBuffer settingsBytes = ByteBuffer.wrap(getBytesFromString(displaySettings.toString()));
    ByteBuffer header = ByteBuffer.allocate(8).order(BYTE_ORDER);
    header.putInt(0, DISPLAY_SETTINGS_HEADER);
    header.putInt(4, numReservedBytes);
    fileChannel_.write(header, filePosition_);
    fileChannel_.write(settingsBytes, filePosition_ + 8);
    // Pointer record at fixed offset 16 locating this settings block.
    ByteBuffer pointerRecord = ByteBuffer.allocate(8).order(BYTE_ORDER);
    pointerRecord.putInt(0, DISPLAY_SETTINGS_OFFSET_HEADER);
    pointerRecord.putInt(4, (int) filePosition_);
    fileChannel_.write(pointerRecord, 16);
    filePosition_ += numReservedBytes + 8;
}