List of usage examples for java.nio ByteOrder LITTLE_ENDIAN
ByteOrder LITTLE_ENDIAN
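Before the full file-reader examples below, here is a minimal, self-contained sketch of the basic pattern they rely on: wrapping a byte array in a ByteBuffer and switching it to ByteOrder.LITTLE_ENDIAN before decoding a value. The class name and byte values are illustrative only, not taken from the source files listed below.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class LittleEndianSketch {
    public static void main(String[] args) {
        // four bytes that encode the int value 1 in little-endian order
        byte[] raw = { 0x01, 0x00, 0x00, 0x00 };

        ByteBuffer buffer = ByteBuffer.wrap(raw);
        buffer.order(ByteOrder.LITTLE_ENDIAN); // ByteBuffer defaults to BIG_ENDIAN

        int value = buffer.getInt(); // 1 in little-endian; 16777216 if read as big-endian
        System.out.println("decoded value = " + value);
    }
}

The same conditional switch to little-endian appears in both methods below, where the buffer order is only changed when the SAV file header indicates that byte order.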
From source file: edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.sav.SAVFileReader.java

private void parseRT7SubTypefield(BufferedInputStream stream) throws IOException {
    int length_unit_length = 4;
    int length_number_of_units = 4;
    int storage_size = length_unit_length + length_number_of_units;

    int[] headerSection = new int[2];
    byte[] byteStorage = new byte[storage_size];

    try {
        // read the 8-byte sub-field header: unit length followed by number of units
        int nbytes = stream.read(byteStorage);
        // to-do: check against nbytes
        //printHexDump(byteStorage, "RT7:storage");

        ByteBuffer bb_data_type = ByteBuffer.wrap(byteStorage, 0, length_unit_length);
        if (isLittleEndian) {
            bb_data_type.order(ByteOrder.LITTLE_ENDIAN);
        }
        int unitLength = bb_data_type.getInt();
        dbgLog.fine("parseRT7 SubTypefield: unitLength=" + unitLength);

        ByteBuffer bb_number_of_units = ByteBuffer.wrap(byteStorage, length_unit_length, length_number_of_units);
        if (isLittleEndian) {
            bb_number_of_units.order(ByteOrder.LITTLE_ENDIAN);
        }
        int numberOfUnits = bb_number_of_units.getInt();
        dbgLog.fine("parseRT7 SubTypefield: numberOfUnits=" + numberOfUnits);

        headerSection[0] = unitLength;
        headerSection[1] = numberOfUnits;

        // read each unit and log it as a 4-byte int or an 8-byte double,
        // honoring the file's byte order
        for (int i = 0; i < numberOfUnits; i++) {
            byte[] work = new byte[unitLength];
            int nb = stream.read(work);
            dbgLog.finer("raw bytes in Hex:" + new String(Hex.encodeHex(work)));

            ByteBuffer bb_field = ByteBuffer.wrap(work);
            if (isLittleEndian) {
                bb_field.order(ByteOrder.LITTLE_ENDIAN);
            }
            dbgLog.fine("RT7ST: raw bytes in Hex:" + new String(Hex.encodeHex(bb_field.array())));

            if (unitLength == 4) {
                int fieldData = bb_field.getInt();
                dbgLog.fine("RT7ST: " + i + "-th fieldData=" + fieldData);
                dbgLog.fine("RT7ST: fieldData in Hex=" + Integer.toHexString(fieldData));
            } else if (unitLength == 8) {
                double fieldData = bb_field.getDouble();
                dbgLog.finer("RT7ST: " + i + "-th fieldData=" + fieldData);
                dbgLog.finer("RT7ST: fieldData in Hex=" + Double.toHexString(fieldData));
            }
            dbgLog.finer("");
        }
    } catch (IOException ex) {
        //ex.printStackTrace();
        throw ex;
    }
}
From source file: edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.sav.SAVFileReader.java

private List<byte[]> getRT7SubTypefieldData(BufferedInputStream stream) throws IOException {
    int length_unit_length = 4;
    int length_number_of_units = 4;
    int storage_size = length_unit_length + length_number_of_units;

    List<byte[]> dataList = new ArrayList<byte[]>();
    int[] headerSection = new int[2];
    byte[] byteStorage = new byte[storage_size];

    try {
        // read the 8-byte sub-field header: unit length followed by number of units
        int nbytes = stream.read(byteStorage);
        // to-do: check against nbytes
        //printHexDump(byteStorage, "RT7:storage");

        ByteBuffer bb_data_type = ByteBuffer.wrap(byteStorage, 0, length_unit_length);
        if (isLittleEndian) {
            bb_data_type.order(ByteOrder.LITTLE_ENDIAN);
        }
        int unitLength = bb_data_type.getInt();
        dbgLog.fine("parseRT7SubTypefield: unitLength=" + unitLength);

        ByteBuffer bb_number_of_units = ByteBuffer.wrap(byteStorage, length_unit_length, length_number_of_units);
        if (isLittleEndian) {
            bb_number_of_units.order(ByteOrder.LITTLE_ENDIAN);
        }
        int numberOfUnits = bb_number_of_units.getInt();
        dbgLog.fine("parseRT7SubTypefield: numberOfUnits=" + numberOfUnits);

        headerSection[0] = unitLength;
        headerSection[1] = numberOfUnits;

        // collect the raw bytes of each unit without decoding them
        for (int i = 0; i < numberOfUnits; i++) {
            byte[] work = new byte[unitLength];
            int nb = stream.read(work);
            dbgLog.finer(new String(Hex.encodeHex(work)));
            dataList.add(work);
        }
    } catch (IOException ex) {
        //ex.printStackTrace();
        throw ex;
    }

    return dataList;
}