List of usage examples for java.io.DataInputStream.readInt

public final int readInt() throws IOException

readInt reads four input bytes and returns an int assembled in big-endian (network byte) order. The method is declared by the java.io.DataInput interface and implemented by DataInputStream; it throws EOFException if the stream ends before four bytes have been read.
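A minimal round-trip sketch (the file name data.bin is purely illustrative) showing that readInt consumes exactly the four bytes produced by DataOutputStream.writeInt:

import java.io.*;

public class ReadIntExample {
    public static void main(String[] args) throws IOException {
        File file = new File("data.bin");
        // Write a single int; DataOutputStream uses big-endian order.
        try (DataOutputStream out = new DataOutputStream(new FileOutputStream(file))) {
            out.writeInt(42);
        }
        // Read it back; readInt throws EOFException if fewer than 4 bytes remain.
        try (DataInputStream in = new DataInputStream(new FileInputStream(file))) {
            int value = in.readInt();
            System.out.println(value); // prints 42
        }
    }
}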
From source file:com.ning.arecibo.util.timeline.times.TimelineCoderImpl.java
@Override
public int countTimeBytesSamples(final byte[] timeBytes) {
    int count = 0;
    try {
        final ByteArrayInputStream byteStream = new ByteArrayInputStream(timeBytes);
        final DataInputStream byteDataStream = new DataInputStream(byteStream);
        int opcode;
        while ((opcode = byteDataStream.read()) != -1) {
            if (opcode == TimelineOpcode.FULL_TIME.getOpcodeIndex()) {
                byteDataStream.readInt();
                count++;
            } else if (opcode <= TimelineOpcode.MAX_DELTA_TIME) {
                count++;
            } else if (opcode == TimelineOpcode.REPEATED_DELTA_TIME_BYTE.getOpcodeIndex()) {
                count += byteDataStream.read();
                byteDataStream.read();
            } else if (opcode == TimelineOpcode.REPEATED_DELTA_TIME_SHORT.getOpcodeIndex()) {
                count += byteDataStream.readUnsignedShort();
                byteDataStream.read();
            } else {
                throw new IllegalStateException(String.format(
                        "In TimelineCoder.countTimeBytesSamples(), unrecognized opcode %d", opcode));
            }
        }
        return count;
    } catch (IOException e) {
        log.error(e, "IOException while counting timeline samples");
        return count;
    }
}
From source file:org.carbondata.processing.globalsurrogategenerator.LevelGlobalSurrogateGeneratorThread.java
private int getMaxKeyAssigned(CarbonFile memberFile) {
    DataInputStream inputStream = null;
    try {
        inputStream = FileFactory.getDataInputStream(memberFile.getPath(),
                FileFactory.getFileType(memberFile.getPath()));
        return inputStream.readInt();
    } catch (FileNotFoundException e) {
        LOGGER.error(e, e.getMessage());
    } catch (IOException e) {
        LOGGER.error(e, e.getMessage());
    } finally {
        CarbonUtil.closeStreams(inputStream);
    }
    return -1;
}
From source file:eu.delving.sip.files.ReportFile.java
public ReportFile(File reportFile, File reportIndexFile, File invalidFile, File linkFile,
        DataSet dataSet, String prefix) throws IOException {
    this.reportFile = reportFile;
    this.reportAccess = new RandomAccessFile(this.reportFile, "r");
    this.reportIndexAccess = new RandomAccessFile(reportIndexFile, "r");
    this.linkFile = new LinkFile(linkFile, dataSet, prefix);
    this.dataSet = dataSet;
    this.prefix = prefix;
    int recordCount = (int) (reportIndexAccess.length() / LONG_SIZE);
    recs = new ArrayList<Rec>(recordCount);
    for (int walk = 0; walk < recordCount; walk++) {
        recs.add(new Rec(walk));
    }
    DataInputStream invalidIn = new DataInputStream(new FileInputStream(invalidFile));
    int invalidCount = invalidIn.readInt();
    invalidRecs = new ArrayList<Rec>(invalidCount);
    for (int walk = 0; walk < invalidCount; walk++) {
        int recordNumber = invalidIn.readInt();
        invalidRecs.add(recs.get(recordNumber));
    }
    invalidIn.close();
}
From source file:org.apache.pdfbox.tools.imageio.TestImageIOUtils.java
/**
 * Checks whether the resolution of a BMP image file is as expected.
 *
 * @param filename the name of the BMP file
 * @param expectedResolution the expected resolution
 *
 * @throws IOException if something goes wrong
 */
private void checkBmpResolution(String filename, int expectedResolution)
        throws FileNotFoundException, IOException {
    // BMP format explained here:
    // http://www.javaworld.com/article/2077561/learn-java/java-tip-60--saving-bitmap-files-in-java.html
    // we skip 38 bytes and then read two 4-byte integers and reverse the bytes
    DataInputStream dis = new DataInputStream(new FileInputStream(new File(filename)));
    int skipped = dis.skipBytes(38);
    assertEquals("Can't skip 38 bytes in image file " + filename, 38, skipped);
    int pixelsPerMeter = Integer.reverseBytes(dis.readInt());
    int actualResolution = (int) Math.round(pixelsPerMeter / 100.0 * 2.54);
    assertEquals("X resolution doesn't match in image file " + filename,
            expectedResolution, actualResolution);
    pixelsPerMeter = Integer.reverseBytes(dis.readInt());
    actualResolution = (int) Math.round(pixelsPerMeter / 100.0 * 2.54);
    assertEquals("Y resolution doesn't match in image file " + filename,
            expectedResolution, actualResolution);
    dis.close();
}
From source file:org.bdval.cache.TableCache.java
public ObjectSet<CharSequence> getTableColumnIds(final int splitId, final String splitType,
        final String datasetName) {
    final File cachedTableFile = getCachedTableFile(splitId, splitType, datasetName);
    final ObjectSet<CharSequence> result = new ObjectOpenHashSet<CharSequence>();
    DataInputStream dataInput = null;
    try {
        dataInput = new DataInputStream(new FastBufferedInputStream(new FileInputStream(cachedTableFile)));
        final int numberOfColumns = dataInput.readInt();
        LOG.info("Reading cached table with " + numberOfColumns + " columns");
        for (int i = 0; i < numberOfColumns; i++) {
            final String colType = dataInput.readUTF();
            final String colId = dataInput.readUTF();
            result.add(colId);
            if ("s".equals(colType)) {
                final int numStrings = dataInput.readInt();
                for (int j = 0; j < numStrings; j++) {
                    dataInput.readUTF();
                }
            } else if ("d".equals(colType)) {
                final int numDoubles = dataInput.readInt();
                // we don't need to read these doubles, just skip them
                final int numBytes = Double.SIZE * numDoubles / 8;
                final int actualBytes = dataInput.skipBytes(numBytes);
                if (actualBytes != numBytes) {
                    LOG.warn("actual bytes skipped (" + actualBytes + ") does "
                            + "not equal expected of " + numBytes);
                }
            } else {
                LOG.error("UNKNOWN COLUMN TYPE " + colType
                        + " cannot read cached table from file " + filenameOf(cachedTableFile));
                return null;
            }
        }
        return result;
    } catch (IOException e) {
        LOG.error("Error getting column ids", e);
        return null;
    } finally {
        IOUtils.closeQuietly(dataInput);
    }
}
From source file:org.sakaiproject.util.serialize.Type1BaseResourcePropertiesSerializer.java
/**
 * @see org.sakaiproject.entity.api.serialize.DataStreamEntitySerializer#parse(org.sakaiproject.entity.api.serialize.SerializableEntity,
 *      java.io.DataInputStream)
 */
public void parse(SerializableEntity se, DataInputStream ds) throws EntityParseException {
    if (!(se instanceof SerializablePropertiesAccess)) {
        throw new EntityParseException("Cant serialize " + se + " as it is not a SerializableProperties ");
    }
    SerializablePropertiesAccess sp = (SerializablePropertiesAccess) se;
    Map<String, Object> properties = new HashMap<String, Object>();
    try {
        int type = ds.readInt();
        if (type == TYPE1) {
            int block = ds.readInt();
            if (block == BLOCK1) {
                int nprops = ds.readInt();
                for (int i = 0; i < nprops; i++) {
                    block = ds.readInt();
                    switch (block) {
                    case BLOCK2: {
                        String key = ds.readUTF();
                        String value = ds.readUTF();
                        properties.put(key, value);
                        break;
                    }
                    case BLOCK3: {
                        String key = ds.readUTF();
                        int n = ds.readInt();
                        List<String> l = new Vector<String>();
                        for (int j = 0; j < n; j++) {
                            l.add(ds.readUTF());
                        }
                        properties.put(key, l);
                        break;
                    }
                    default:
                        throw new EntityParseException("Unrecognised block number " + block);
                    }
                }
                sp.setSerializableProperties(properties);
            } else {
                throw new EntityParseException("Failed to parse entity, unrecognised block " + block);
            }
        } else {
            throw new EntityParseException("Cant Parse block, resource properties is not type 1 " + type);
        }
    } catch (EntityParseException ep) {
        throw ep;
    } catch (Exception ex) {
        throw new EntityParseException("Failed to parse entity ", ex);
    }
}
From source file:org.codehaus.groovy.grails.web.pages.GroovyPageMetaInfo.java
/**
 * Reads the line number mapping information from a separate file that has been generated at precompile time.
 *
 * @throws IOException
 */
private void readLineNumbers() throws IOException {
    String dataResourceName = resolveDataResourceName(LINENUMBERS_DATA_POSTFIX);
    DataInputStream input = null;
    try {
        input = new DataInputStream(pageClass.getResourceAsStream(dataResourceName));
        int arrayLen = input.readInt();
        lineNumbers = new int[arrayLen];
        for (int i = 0; i < arrayLen; i++) {
            lineNumbers[i] = input.readInt();
        }
    } finally {
        IOUtils.closeQuietly(input);
    }
}
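For context, here is a sketch of a writer that would produce the format readLineNumbers expects: an int count followed by that many int values. The class and file name are hypothetical illustrations, not part of Grails, which generates this data at precompile time.

import java.io.DataOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;

public class LineNumberDataWriter {
    // Writes the count, then each line number, so a matching readInt loop can read them back.
    public static void write(String fileName, int[] lineNumbers) throws IOException {
        try (DataOutputStream out = new DataOutputStream(new FileOutputStream(fileName))) {
            out.writeInt(lineNumbers.length);
            for (int lineNumber : lineNumbers) {
                out.writeInt(lineNumber);
            }
        }
    }
}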
From source file:org.apache.fop.fonts.type1.PFBParser.java
private void parsePCFormat(PFBData pfb, DataInputStream din) throws IOException {
    int segmentHead;
    int segmentType;
    int bytesRead;

    //Read first segment
    segmentHead = din.readUnsignedByte();
    if (segmentHead != 128) {
        throw new IOException("Invalid file format. Expected ASCII 80hex");
    }
    segmentType = din.readUnsignedByte(); //Read
    int len1 = swapInteger(din.readInt());
    byte[] headerSegment = new byte[len1];
    din.readFully(headerSegment);
    pfb.setHeaderSegment(headerSegment);

    //Read second segment
    segmentHead = din.readUnsignedByte();
    if (segmentHead != 128) {
        throw new IOException("Invalid file format. Expected ASCII 80hex");
    }
    segmentType = din.readUnsignedByte();
    int len2 = swapInteger(din.readInt());
    byte[] encryptedSegment = new byte[len2];
    din.readFully(encryptedSegment);
    pfb.setEncryptedSegment(encryptedSegment);

    //Read third segment
    segmentHead = din.readUnsignedByte();
    if (segmentHead != 128) {
        throw new IOException("Invalid file format. Expected ASCII 80hex");
    }
    segmentType = din.readUnsignedByte();
    int len3 = swapInteger(din.readInt());
    byte[] trailerSegment = new byte[len3];
    din.readFully(trailerSegment);
    pfb.setTrailerSegment(trailerSegment);

    //Read EOF indicator
    segmentHead = din.readUnsignedByte();
    if (segmentHead != 128) {
        throw new IOException("Invalid file format. Expected ASCII 80hex");
    }
    segmentType = din.readUnsignedByte();
    if (segmentType != 3) {
        throw new IOException("Expected segment type 3, but found: " + segmentType);
    }
}
From source file:org.calrissian.accumulorecipes.commons.support.qfd.KeyToAttributeStoreWholeColFXform.java
@Override
public V apply(Map.Entry<Key, Value> keyValueEntry) {
    try {
        B entry = null;
        List<Map.Entry<Key, Value>> groupedKVs = decodeRow(keyValueEntry.getKey(), keyValueEntry.getValue());
        for (Map.Entry<Key, Value> groupedEvent : groupedKVs) {
            String[] colQParts = splitPreserveAllTokens(
                    groupedEvent.getKey().getColumnQualifier().toString(), NULL_BYTE);
            String[] aliasValue = splitPreserveAllTokens(colQParts[1], ONE_BYTE);
            String visibility = groupedEvent.getKey().getColumnVisibility().toString();
            try {
                ByteArrayInputStream bais = new ByteArrayInputStream(groupedEvent.getValue().get());
                DataInputStream dis = new DataInputStream(bais);
                dis.readLong(); // minimum expiration of keys and values
                long timestamp = dis.readLong();
                if (entry == null)
                    entry = buildEntryFromKey(new Key(keyValueEntry.getKey().getRow(),
                            keyValueEntry.getKey().getColumnFamily(),
                            keyValueEntry.getKey().getColumnQualifier(), timestamp));
                int length = dis.readInt();
                byte[] metaBytes = new byte[length];
                dis.readFully(metaBytes);
                Map<String, String> meta = metadataSerDe.deserialize(metaBytes);
                Map<String, String> metadata = (length == 0 ? new HashMap<String, String>()
                        : new HashMap<String, String>(meta));
                setVisibility(metadata, visibility);
                Attribute attribute = new Attribute(colQParts[0],
                        typeRegistry.decode(aliasValue[0], aliasValue[1]), metadata);
                entry.attr(attribute);
            } catch (Exception e) {
                log.error("There was an error deserializing the metadata for a attribute", e);
            }
        }
        return entry.build();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
From source file:org.squidy.nodes.Tracking.java
private void startMulticastServer() throws ProcessException {
    InetAddress multicastGroup;
    try {
        multicastGroup = InetAddress.getByName(multicastGroupAddress);
    } catch (UnknownHostException e) {
        throw new ProcessException(e.getMessage(), e);
    }

    server = new MulticastServer(multicastGroup, port);
    server.addMulticastListener(new MulticastAdapter() {

        /* (non-Javadoc)
         * @see org.squidy.manager.protocol.udp.UDPListener#parseData(byte[])
         */
        public void parseData(byte[] data) {
            // TODO [SF]: Do your parsing stuff here!!!
            ByteArrayInputStream bais = new ByteArrayInputStream(data);
            DataInputStream instream = new DataInputStream(bais);
            try {
                System.out.println("short: " + instream.readUnsignedShort() + " | "
                        + instream.readUnsignedShort() + " | " + (instream.readInt() & 0x7F));
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }

            // String s = new String(data);
            // System.out.println(s);

            /*
             * Data is coded starting with MessageID = 7.
             *
             * lastFrameCounter = <FrameId>;
             *
             * if (<singleMarker == true>) {
             *     trackedBodies = <numTrackedSingleMarker>;
             *     bodyId = 0;
             *     // Read x, y, z and convert to mm
             *     x = <x> * 100;
             *     y = <y> * 100;
             *     z = <z> * 100;
             *     publish(new DataPosition3D(Tracking.class, bodyID, x, y, z, width, height, depth,
             *             lastFrameCounter, trackedBodies));
             *     // only publish single marker positions, not the ones belonging to a rigid body
             * }
             *
             * if (<rigidBody == true>) {
             *     trackedBodies = <numTrackedRigidBodies>;
             *     bodyId = <rigidBodyId>;
             *     // Read x, y, z and convert to mm
             *     x = <x> * 100;
             *     y = <y> * 100;
             *     z = <z> * 100;
             *     // Read quaternions
             *     qx = <qx>; qy = <qy>; qz = <qz>; qw = <qw>;
             *     // Transform to rotation matrix
             *     // Column 1:
             *     rxx = 2*(qx*qx + qw*qw) - 1; ryx = 2*(qx*qy + qz*qw); rzx = 2*(qx*qz - qy*qw);
             *     // Column 2:
             *     rxy = 2*(qx*qy - qz*qw); ryy = 2*(qy*qy + qw*qw) - 1; rzy = 2*(qy*qz + qx*qw);
             *     // Column 3:
             *     rxz = 2*(qx*qz + qy*qw); ryz = 2*(qy*qz - qx*qw); rzz = 2*(qz*qz + qw*qw) - 1;
             *     // quadToMatrix: ?
             *     // m[0] = 1-2*q[1]*q[1]-2*q[2]*q[2]; m[1] = 2*q[0]*q[1]-2*q[3]*q[2]; m[2] = 2*q[0]*q[2]+2*q[3]*q[1];
             *     // m[3] = 2*q[0]*q[1]+2*q[3]*q[2]; m[4] = 1-2*q[0]*q[0]-2*q[2]*q[2]; m[5] = 2*q[1]*q[2]-2*q[3]*q[0];
             *     // m[6] = 2*q[0]*q[2]-2*q[3]*q[1]; m[7] = 2*q[1]*q[2]+2*q[3]*q[0]; m[8] = 1-2*q[0]*q[0]-2*q[1]*q[1];
             *     publish(new DataPosition6D(arTracking.getClass(), bodyID, x, y, z, width, height, depth,
             *             rxx, ryx, rzx, rxy, ryy, rzy, rxz, ryz, rzz, lastFrameCounter, trackedBodies));
             * }
             */
        }
    });
}
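The commented-out pseudocode above only sketches the quaternion-to-rotation-matrix conversion. A minimal, self-contained version of that math, assuming a unit quaternion; the class and method names are illustrative and not part of the Squidy source:

public final class QuaternionToMatrix {

    // Returns the rotation matrix in row-major order {rxx, rxy, rxz, ryx, ryy, ryz, rzx, rzy, rzz},
    // using the same terms as the pseudocode above (valid when qx*qx + qy*qy + qz*qz + qw*qw == 1).
    public static double[] toRotationMatrix(double qx, double qy, double qz, double qw) {
        return new double[] {
            2 * (qx * qx + qw * qw) - 1, 2 * (qx * qy - qz * qw),     2 * (qx * qz + qy * qw),
            2 * (qx * qy + qz * qw),     2 * (qy * qy + qw * qw) - 1, 2 * (qy * qz - qx * qw),
            2 * (qx * qz - qy * qw),     2 * (qy * qz + qx * qw),     2 * (qz * qz + qw * qw) - 1
        };
    }

    public static void main(String[] args) {
        // The identity quaternion (no rotation) yields the identity matrix.
        double[] m = toRotationMatrix(0, 0, 0, 1);
        System.out.println(java.util.Arrays.toString(m)); // [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0]
    }
}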