List of usage examples for java.nio.ByteOrder.LITTLE_ENDIAN
ByteOrder.LITTLE_ENDIAN
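Before the per-project examples below, here is a minimal, self-contained sketch (not drawn from any of the listed projects) of the basic pattern they all share: allocate a ByteBuffer, set its order to ByteOrder.LITTLE_ENDIAN (a new buffer defaults to BIG_ENDIAN), then write and read multi-byte values. The class name LittleEndianDemo and the sample values are illustrative only.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class LittleEndianDemo {
    public static void main(String[] args) {
        // A new ByteBuffer is big-endian by default; switch it to little-endian.
        ByteBuffer buf = ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN);

        buf.putInt(0x11223344);       // stored in memory as 44 33 22 11
        buf.putShort((short) 0x5566); // stored in memory as 66 55

        buf.flip(); // prepare the buffer for reading
        System.out.printf("int   = 0x%08X%n", buf.getInt());   // 0x11223344
        System.out.printf("short = 0x%04X%n", buf.getShort()); // 0x5566
    }
}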
From source file:io.github.dsheirer.record.wave.WaveWriter.java
/**
 * Creates a wave file header with a format descriptor chunk
 */
public static ByteBuffer getWaveHeader(AudioFormat format) {
    ByteBuffer header = ByteBuffer.allocate(12).order(ByteOrder.LITTLE_ENDIAN);

    //RIFF/WAVE header and size
    header.put(RIFF_ID.getBytes());
    header.putInt(INITIAL_TOTAL_LENGTH);
    header.put(WAVE_ID.getBytes());

    //Reset the buffer pointer to 0
    header.position(0);

    return header;
}
From source file:nl.salp.warcraft4j.fileformat.dbc.DbcFile.java
/**
 * Read the id from an entry with an index from a reader.
 *
 * @param index  The index of the entry.
 * @param reader The reader to read the entry from.
 *
 * @return The entry id.
 *
 * @throws DbcParsingException When reading the entry id failed.
 */
private int getEntryId(int index, DataReader reader) throws DbcParsingException {
    DbcHeader header = getHeader();
    int entryOffset = header.getEntryBlockStartingOffset() + (index * header.getEntrySize());
    return reader.read(DataTypeFactory.getInteger(), entryOffset, ByteOrder.LITTLE_ENDIAN);
}
From source file:xbird.util.nio.RemoteMemoryMappedFile.java
private int[] recvResponse(final ReadableByteChannel channel, final ByteBuffer buf, final int dstlen)
        throws IOException {
    buf.clear();
    // set endian optimized for this machine
    final boolean isBufBigEndian = (buf.order() == ByteOrder.BIG_ENDIAN);
    if (_bigEndian != isBufBigEndian) {
        buf.order(_bigEndian ? ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN);
    }
    NIOUtils.readFully(channel, buf, _pageSize);
    buf.flip();
    IntBuffer ibuf = buf.asIntBuffer();
    int[] dst = new int[dstlen];
    ibuf.get(dst);
    return dst;
}
From source file:com.nordicsemi.nrfUARTv2.MainActivity.java
private short bytesToShort(byte b1, byte b2) {
    ByteBuffer bb = ByteBuffer.allocate(2);
    bb.order(ByteOrder.LITTLE_ENDIAN);
    bb.put(b1);
    bb.put(b2);
    short shortVal = bb.getShort(0);
    return shortVal;
}
From source file:org.apache.htrace.impl.PackedBufferManager.java
private void readAndValidateResponseFrame(SelectionKey sockKey, ByteBuffer buf, long expectedSeq,
        int expectedMethodId) throws IOException {
    buf.clear();
    buf.limit(PackedBuffer.HRPC_RESP_FRAME_LENGTH);
    doRecv(sockKey, buf);
    buf.flip();
    buf.order(ByteOrder.LITTLE_ENDIAN);
    long seq = buf.getLong();
    if (seq != expectedSeq) {
        throw new IOException("Expected sequence number " + expectedSeq + ", but got sequence number " + seq);
    }
    int methodId = buf.getInt();
    if (expectedMethodId != methodId) {
        throw new IOException("Expected method id " + expectedMethodId + ", but got " + methodId);
    }
    int errorLength = buf.getInt();
    buf.getInt();
    if ((errorLength < 0) || (errorLength > PackedBuffer.MAX_HRPC_ERROR_LENGTH)) {
        throw new IOException("Got server error with invalid length " + errorLength);
    } else if (errorLength > 0) {
        buf.clear();
        buf.limit(errorLength);
        doRecv(sockKey, buf);
        buf.flip();
        CharBuffer charBuf = StandardCharsets.UTF_8.decode(buf);
        String serverErrorStr = charBuf.toString();
        throw new IOException("Got server error " + serverErrorStr);
    }
}
From source file:io.github.dsheirer.record.wave.WaveWriter.java
/**
 * Creates an audio format chunk
 */
public static ByteBuffer getFormatChunk(AudioFormat format) {
    ByteBuffer header = ByteBuffer.allocate(24).order(ByteOrder.LITTLE_ENDIAN);

    //Format descriptor
    header.put(FORMAT_CHUNK_ID.getBytes());
    header.putInt(FORMAT_CHUNK_LENGTH);
    header.putShort(FORMAT_UNCOMPRESSED_PCM);
    header.putShort((short) format.getChannels());
    header.putInt((int) format.getSampleRate());

    //Byte Rate = sample rate * channels * bits per sample / 8
    int frameByteRate = format.getChannels() * format.getSampleSizeInBits() / 8;
    int byteRate = (int) (format.getSampleRate() * frameByteRate);
    header.putInt(byteRate);

    //Block Align
    header.putShort((short) frameByteRate);

    //Bits per Sample
    header.putShort((short) format.getSampleSizeInBits());

    //Reset the buffer pointer to 0
    header.position(0);

    return header;
}
From source file:ffx.xray.MTZFilter.java
/**
 * {@inheritDoc}
 */
@Override
public boolean readFile(File mtzFile, ReflectionList reflectionlist, DiffractionRefinementData refinementdata,
        CompositeConfiguration properties) {
    int nread, nignore, nres, nfriedel, ncut;
    ByteOrder b = ByteOrder.nativeOrder();
    FileInputStream fis;
    DataInputStream dis;
    boolean transpose = false;
    StringBuilder sb = new StringBuilder();
    //sb.append(String.format("\n Opening %s\n", mtzFile.getName()));
    try {
        fis = new FileInputStream(mtzFile);
        dis = new DataInputStream(fis);

        byte headeroffset[] = new byte[4];
        byte bytes[] = new byte[80];
        int offset = 0;

        // eat "MTZ" title
        dis.read(bytes, offset, 4);
        String mtzstr = new String(bytes);

        // header offset
        dis.read(headeroffset, offset, 4);

        // machine stamp
        dis.read(bytes, offset, 4);
        ByteBuffer bb = ByteBuffer.wrap(bytes);
        int stamp = bb.order(ByteOrder.BIG_ENDIAN).getInt();
        String stampstr = Integer.toHexString(stamp);
        switch (stampstr.charAt(0)) {
        case '1':
        case '3':
            if (b.equals(ByteOrder.LITTLE_ENDIAN)) {
                b = ByteOrder.BIG_ENDIAN;
            }
            break;
        case '4':
            if (b.equals(ByteOrder.BIG_ENDIAN)) {
                b = ByteOrder.LITTLE_ENDIAN;
            }
            break;
        }

        bb = ByteBuffer.wrap(headeroffset);
        int headeroffseti = bb.order(b).getInt();

        // skip to header and parse
        dis.skipBytes((headeroffseti - 4) * 4);
        for (Boolean parsing = true; parsing; dis.read(bytes, offset, 80)) {
            mtzstr = new String(bytes);
            parsing = parseHeader(mtzstr);
        }

        // column identifiers
        foString = sigfoString = rfreeString = null;
        if (properties != null) {
            foString = properties.getString("fostring", null);
            sigfoString = properties.getString("sigfostring", null);
            rfreeString = properties.getString("rfreestring", null);
        }
        h = k = l = fo = sigfo = rfree = -1;
        fplus = sigfplus = fminus = sigfminus = rfreeplus = rfreeminus = -1;
        boolean print = true;
        parseColumns(print);

        if (h < 0 || k < 0 || l < 0) {
            String message = "Fatal error in MTZ file - no H K L indexes?\n";
            logger.log(Level.SEVERE, message);
            return false;
        }

        // reopen to start at beginning
        fis = new FileInputStream(mtzFile);
        dis = new DataInputStream(fis);

        // skip initial header
        dis.skipBytes(80);

        // check if HKLs need to be transposed or not
        float data[] = new float[nColumns];
        HKL mate = new HKL();
        int nposignore = 0;
        int ntransignore = 0;
        int nzero = 0;
        int none = 0;
        for (int i = 0; i < nReflections; i++) {
            for (int j = 0; j < nColumns; j++) {
                dis.read(bytes, offset, 4);
                bb = ByteBuffer.wrap(bytes);
                data[j] = bb.order(b).getFloat();
            }
            int ih = (int) data[h];
            int ik = (int) data[k];
            int il = (int) data[l];
            boolean friedel = reflectionlist.findSymHKL(ih, ik, il, mate, false);
            HKL hklpos = reflectionlist.getHKL(mate);
            if (hklpos == null) {
                nposignore++;
            }
            friedel = reflectionlist.findSymHKL(ih, ik, il, mate, true);
            HKL hkltrans = reflectionlist.getHKL(mate);
            if (hkltrans == null) {
                ntransignore++;
            }
            if (rfree > 0) {
                if (((int) data[rfree]) == 0) {
                    nzero++;
                } else if (((int) data[rfree]) == 1) {
                    none++;
                }
            }
            if (rfreeplus > 0) {
                if (((int) data[rfreeplus]) == 0) {
                    nzero++;
                } else if (((int) data[rfreeplus]) == 1) {
                    none++;
                }
            }
            if (rfreeminus > 0) {
                if (((int) data[rfreeminus]) == 0) {
                    nzero++;
                } else if (((int) data[rfreeminus]) == 1) {
                    none++;
                }
            }
        }
        if (nposignore > ntransignore) {
            transpose = true;
        }
        if (none > (nzero * 2) && refinementdata.rfreeflag < 0) {
            refinementdata.setFreeRFlag(0);
            sb.append(String.format(" Setting R free flag to %d based on MTZ file data.\n",
                    refinementdata.rfreeflag));
        } else if (nzero > (none * 2) && refinementdata.rfreeflag < 0) {
            refinementdata.setFreeRFlag(1);
            sb.append(String.format(" Setting R free flag to %d based on MTZ file data.\n",
                    refinementdata.rfreeflag));
        } else if (refinementdata.rfreeflag < 0) {
            refinementdata.setFreeRFlag(0);
            sb.append(String.format(" Setting R free flag to MTZ default: %d\n",
                    refinementdata.rfreeflag));
        }

        // reopen to start at beginning
        fis = new FileInputStream(mtzFile);
        dis = new DataInputStream(fis);

        // skip initial header
        dis.skipBytes(80);

        // read in data
        double anofsigf[][] = new double[refinementdata.n][4];
        for (int i = 0; i < refinementdata.n; i++) {
            anofsigf[i][0] = anofsigf[i][1] = anofsigf[i][2] = anofsigf[i][3] = Double.NaN;
        }
        nread = nignore = nres = nfriedel = ncut = 0;
        for (int i = 0; i < nReflections; i++) {
            for (int j = 0; j < nColumns; j++) {
                dis.read(bytes, offset, 4);
                bb = ByteBuffer.wrap(bytes);
                data[j] = bb.order(b).getFloat();
            }
            int ih = (int) data[h];
            int ik = (int) data[k];
            int il = (int) data[l];
            boolean friedel = reflectionlist.findSymHKL(ih, ik, il, mate, transpose);
            HKL hkl = reflectionlist.getHKL(mate);
            if (hkl != null) {
                if (fo > 0 && sigfo > 0) {
                    if (refinementdata.fsigfcutoff > 0.0) {
                        if ((data[fo] / data[sigfo]) < refinementdata.fsigfcutoff) {
                            ncut++;
                            continue;
                        }
                    }
                    if (friedel) {
                        anofsigf[hkl.index()][2] = data[fo];
                        anofsigf[hkl.index()][3] = data[sigfo];
                        nfriedel++;
                    } else {
                        anofsigf[hkl.index()][0] = data[fo];
                        anofsigf[hkl.index()][1] = data[sigfo];
                    }
                } else {
                    if (fplus > 0 && sigfplus > 0) {
                        if (refinementdata.fsigfcutoff > 0.0) {
                            if ((data[fplus] / data[sigfplus]) < refinementdata.fsigfcutoff) {
                                ncut++;
                                continue;
                            }
                        }
                        anofsigf[hkl.index()][0] = data[fplus];
                        anofsigf[hkl.index()][1] = data[sigfplus];
                    }
                    if (fminus > 0 && sigfminus > 0) {
                        if (refinementdata.fsigfcutoff > 0.0) {
                            if ((data[fminus] / data[sigfminus]) < refinementdata.fsigfcutoff) {
                                ncut++;
                                continue;
                            }
                        }
                        anofsigf[hkl.index()][2] = data[fminus];
                        anofsigf[hkl.index()][3] = data[sigfminus];
                    }
                }
                if (rfree > 0) {
                    refinementdata.setFreeR(hkl.index(), (int) data[rfree]);
                } else {
                    if (rfreeplus > 0 && rfreeminus > 0) {
                        // not sure what the correct thing to do here is?
                        refinementdata.setFreeR(hkl.index(), (int) data[rfreeplus]);
                    } else if (rfreeplus > 0) {
                        refinementdata.setFreeR(hkl.index(), (int) data[rfreeplus]);
                    } else if (rfreeminus > 0) {
                        refinementdata.setFreeR(hkl.index(), (int) data[rfreeminus]);
                    }
                }
                nread++;
            } else {
                HKL tmp = new HKL(ih, ik, il);
                if (!reflectionlist.resolution
                        .inInverseResSqRange(Crystal.invressq(reflectionlist.crystal, tmp))) {
                    nres++;
                } else {
                    nignore++;
                }
            }
        }

        // set up fsigf from F+ and F-
        refinementdata.generate_fsigf_from_anofsigf(anofsigf);

        sb.append(String.format(" MTZ file type (machine stamp): %s\n", stampstr));
        sb.append(String.format(" HKL data is %s\n", transpose ? "transposed" : "not transposed"));
        sb.append(String.format(" HKL read in: %d\n", nread));
        sb.append(String.format(" HKL read as friedel mates: %d\n", nfriedel));
        sb.append(String.format(" HKL NOT read in (too high resolution): %d\n", nres));
        sb.append(String.format(" HKL NOT read in (not in internal list?): %d\n", nignore));
        sb.append(String.format(" HKL NOT read in (F/sigF cutoff): %d\n", ncut));
        sb.append(String.format(" HKL in internal list: %d\n", reflectionlist.hkllist.size()));
        if (logger.isLoggable(Level.INFO)) {
            logger.info(sb.toString());
        }
        if (rfree < 0 && rfreeplus < 0 && rfreeminus < 0) {
            refinementdata.generateRFree();
        }
    } catch (EOFException eof) {
        System.out.println("EOF reached ");
        return false;
    } catch (IOException ioe) {
        System.out.println("IO Exception: " + ioe.getMessage());
        return false;
    }
    return true;
}
From source file:com.yobidrive.diskmap.buckets.BucketTableManager.java
private void initializeBucketTableFromLastCommittedBucketFile() throws BucketTableManagerException {
    FileInputStream tableStream = null;
    FileChannel fileChannel = null;
    try {
        File latestCommittedFile = getLatestCommitedFile();
        if (latestCommittedFile != null) {
            tableStream = new FileInputStream(latestCommittedFile);
            fileChannel = tableStream.getChannel();
            ByteBuffer buffer = ByteBuffer.allocate(HEADERSIZE);
            fileChannel.position(0L);
            int read = fileChannel.read(buffer);
            if (read < HEADERSIZE) {
                fileChannel.close();
                throw new BucketTableManagerException(
                        "Wrong bucket table header size: " + read + "/" + HEADERSIZE);
            }
            // Check content of header. Start with Big Endian (default for Java)
            buffer.rewind();
            byteOrder = ByteOrder.BIG_ENDIAN;
            buffer.order(byteOrder);
            int magic = buffer.getInt();
            if (magic == MAGICSTART_BADENDIAN) {
                byteOrder = ByteOrder.LITTLE_ENDIAN;
                buffer.order(byteOrder);
            } else if (magic != MAGICSTART) {
                fileChannel.close();
                throw new BucketTableManagerException("Bad header in bucket table file");
            }
            // Read number of buckets
            long headerMapSize = buffer.getLong();
            // Read checkPoint
            NeedlePointer includedCheckpoint = new NeedlePointer();
            includedCheckpoint.getNeedlePointerFromBuffer(buffer);
            // Read second magic number
            magic = buffer.getInt();
            if (magic != MAGICEND) {
                fileChannel.close();
                throw new BucketTableManagerException("Bad header in bucket table file");
            }
            // Check number of buckets against requested map size
            if (headerMapSize != mapSize) {
                // Map size does not match
                fileChannel.close();
                throw new BucketTableManagerException(
                        "Requested map size " + mapSize + " does not match header map size " + headerMapSize);
            }
            // Sets initial checkpoint
            bucketTable.setInitialCheckPoint(includedCheckpoint);
            // Now reads all entries
            logger.info("Hot start: loading buckets...");
            for (int i = 0; i < nbBuffers; i++) {
                bucketTable.prepareBufferForReading(i);
                read = fileChannel.read(bucketTable.getBuffer(i));
                if (read < bucketTable.getBuffer(i).limit())
                    throw new BucketTableManagerException("Incomplete bucket table file "
                            + latestCommittedFile.getName() + ", expected " + mapSize + HEADERSIZE);
                //else
                //    logger.info("Hot start: loaded "+(i+1)*entriesPerBuffer+" buckets"+((i<(nbBuffers-1))?"...":"")) ;
            }
            // Checks second magic marker
            buffer = ByteBuffer.allocate(NeedleLogInfo.INFOSIZE);
            buffer.rewind();
            buffer.limit(INTSIZE);
            if (fileChannel.read(buffer) < INTSIZE)
                throw new BucketTableManagerException(
                        "Incomplete bucket table file, missing secong magic number "
                                + latestCommittedFile.getName());
            buffer.rewind();
            magic = buffer.getInt();
            if (magic != MAGICSTART) {
                fileChannel.close();
                throw new BucketTableManagerException("Bad header in bucket table file");
            }
            // Now reads clean counters
            while (true) {
                buffer.rewind();
                buffer.limit(NeedleLogInfo.INFOSIZE);
                read = fileChannel.read(buffer);
                if (read > 0 && read < NeedleLogInfo.INFOSIZE)
                    throw new BucketTableManagerException("Incomplete bucket table file, log info too short "
                            + latestCommittedFile.getName() + ", expected " + mapSize + HEADERSIZE);
                if (read <= 0)
                    break;
                else {
                    NeedleLogInfo nli = new NeedleLogInfo(useAverage);
                    buffer.rewind();
                    nli.getNeedleLogInfo(buffer);
                    logInfoPerLogNumber.put(new Integer(nli.getNeedleFileNumber()), nli);
                }
            }
            logger.info("Hot start: loaded " + (nbBuffers * entriesPerBuffer) + " buckets");
        } else {
            // Empty file
            bucketTable.setInitialCheckPoint(new NeedlePointer());
            bucketTable.format();
        }
    } catch (IOException ie) {
        throw new BucketTableManagerException("Failed initializing bucket table", ie);
    } catch (BufferUnderflowException bue) {
        throw new BucketTableManagerException("Bucket table too short", bue);
    } finally {
        if (fileChannel != null) {
            try {
                fileChannel.close();
            } catch (IOException ex) {
                throw new BucketTableManagerException("Error while closing file channel", ex);
            }
        }
    }
}
From source file:au.org.ala.layers.grid.GridCacheBuilder.java
static void nextRowOfFloats(float[] row, String datatype, boolean byteOrderLSB, int ncols, RandomAccessFile raf,
        byte[] b, float noDataValue) throws IOException {
    int size = 4;
    if (datatype.charAt(0) == 'U') {
        size = 1;
    } else if (datatype.charAt(0) == 'B') {
        size = 1;
    } else if (datatype.charAt(0) == 'S') {
        size = 2;
    } else if (datatype.charAt(0) == 'I') {
        size = 4;
    } else if (datatype.charAt(0) == 'L') {
        size = 8;
    } else if (datatype.charAt(0) == 'F') {
        size = 4;
    } else if (datatype.charAt(0) == 'D') {
        size = 8;
    }

    raf.read(b, 0, size * ncols);

    ByteBuffer bb = ByteBuffer.wrap(b);
    if (byteOrderLSB) {
        bb.order(ByteOrder.LITTLE_ENDIAN);
    } else {
        bb.order(ByteOrder.BIG_ENDIAN);
    }

    int i;
    int length = ncols;
    if (datatype.charAt(0) == 'U') {
        for (i = 0; i < length; i++) {
            float ret = bb.get();
            if (ret < 0) {
                ret += 256;
            }
            row[i] = ret;
        }
    } else if (datatype.charAt(0) == 'B') {
        for (i = 0; i < length; i++) {
            row[i] = (float) bb.get();
        }
    } else if (datatype.charAt(0) == 'S') {
        for (i = 0; i < length; i++) {
            row[i] = (float) bb.getShort();
        }
    } else if (datatype.charAt(0) == 'I') {
        for (i = 0; i < length; i++) {
            row[i] = (float) bb.getInt();
        }
    } else if (datatype.charAt(0) == 'L') {
        for (i = 0; i < length; i++) {
            row[i] = (float) bb.getLong();
        }
    } else if (datatype.charAt(0) == 'F') {
        for (i = 0; i < length; i++) {
            row[i] = (float) bb.getFloat();
        }
    } else if (datatype.charAt(0) == 'D') {
        for (i = 0; i < length; i++) {
            row[i] = (float) bb.getDouble();
        }
    } else {
        logger.info("UNKNOWN TYPE: " + datatype);
    }

    for (i = 0; i < length; i++) {
        if (row[i] == noDataValue) {
            row[i] = Float.NaN;
        }
    }
}
From source file:se.kth.android.StudentCode.StudentCode.java
public void playsoundexample() {
    if (init_done && (!file_loaded) && (!(d_filename == null))) {
        set_output_text(d_filename);
        the_sound_file_contents = read_data_from_file(d_filename); // Read file from plain file of samples in form of shorts
        the_sound_file_contents_bb = ByteBuffer.wrap(the_sound_file_contents); // Wrapper to easier access content.
        the_sound_file_contents_bb.order(ByteOrder.LITTLE_ENDIAN);
        file_loaded = true;
    }
    if (file_loaded) {
        if (the_sound_file_contents_bb.remaining() < 2 * buffer.length)
            the_sound_file_contents_bb.rewind(); // Start buffer from beginning
        for (int i1 = 0; i1 < buffer.length; i1++) {
            buffer[i1] = the_sound_file_contents_bb.getShort(); // Create a buffer of shorts
        }
        p_streaming_buffer_out(buffer, buffer.length, "N3");
        sound_out(buffer, buffer.length); // Send buffer to player
    }
}