Usage examples for java.io.DataInputStream.readLong()
public final long readLong() throws IOException
The readLong method is declared in the DataInput interface and implemented by DataInputStream: it reads eight input bytes and returns them as a long.
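Before the source-file examples, here is a minimal, self-contained sketch of the round trip (the class name and the value 42 are illustrative and not taken from any of the sources below): a long written with DataOutputStream.writeLong is read back with readLong.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class ReadLongExample {
        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            try (DataOutputStream dos = new DataOutputStream(bos)) {
                dos.writeLong(42L); // writes 8 bytes, big-endian
            }
            try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(bos.toByteArray()))) {
                long value = dis.readLong(); // reads the same 8 bytes back
                System.out.println(value);   // prints 42
            }
        }
    }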
From source file: org.slc.sli.dal.encrypt.AesCipher.java

    private Object decryptBinary(String data, Class<?> expectedType) {
        byte[] decoded = decryptToBytes(data);
        DataInputStream dis = new DataInputStream(new ByteArrayInputStream(decoded));
        try {
            if (Boolean.class.equals(expectedType)) {
                return dis.readBoolean();
            } else if (Integer.class.equals(expectedType)) {
                return dis.readInt();
            } else if (Long.class.equals(expectedType)) {
                return dis.readLong();
            } else if (Double.class.equals(expectedType)) {
                return dis.readDouble();
            } else {
                throw new RuntimeException("Unsupported type: " + expectedType.getCanonicalName());
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            try {
                dis.close();
            } catch (IOException e) {
                LOG.error("Unable to close DataInputStream!");
            }
        }
    }
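The write side of this pattern is not shown on this page. A hypothetical counterpart (method and variable names are illustrative, not taken from AesCipher) would serialize the value with the matching DataOutputStream call before encrypting, so that readLong later finds exactly eight bytes:

    // Hypothetical counterpart: serialize a boxed value to bytes before encryption.
    private byte[] serializeBinary(Object value) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(bos);
        if (value instanceof Boolean) {
            dos.writeBoolean((Boolean) value);
        } else if (value instanceof Integer) {
            dos.writeInt((Integer) value);
        } else if (value instanceof Long) {
            dos.writeLong((Long) value);   // 8 bytes, read back by readLong()
        } else if (value instanceof Double) {
            dos.writeDouble((Double) value);
        } else {
            throw new RuntimeException("Unsupported type: " + value.getClass().getCanonicalName());
        }
        dos.flush();
        return bos.toByteArray();
    }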
From source file: com.facebook.infrastructure.db.SuperColumn.java

    private SuperColumn defreezeSuperColumn(DataInputStream dis) throws IOException {
        String name = dis.readUTF();
        SuperColumn superColumn = new SuperColumn(name);
        superColumn.markForDeleteAt(dis.readLong());
        return superColumn;
    }
From source file: org.apache.jackrabbit.core.journal.FileRecordLog.java

    /**
     * Create a new instance of this class. Opens a record log in read-only mode.
     *
     * @param logFile file containing record log
     * @throws java.io.IOException if an I/O error occurs
     */
    public FileRecordLog(File logFile) throws IOException {
        this.logFile = logFile;

        if (logFile.exists()) {
            DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(logFile), 128));
            try {
                readHeader(in);
                previousRevision = in.readLong();
                lastRevision = previousRevision + logFile.length() - HEADER_SIZE;
            } finally {
                close(in);
            }
        } else {
            isNew = true;
        }
    }
From source file: com.linkedin.pinot.core.segment.index.SegmentMetadataImpl.java

    private void loadCreationMeta(File crcFile) throws IOException {
        if (crcFile.exists()) {
            final DataInputStream ds = new DataInputStream(new FileInputStream(crcFile));
            _crc = ds.readLong();
            _creationTime = ds.readLong();
            ds.close();
        }
    }
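The .crc file read here is just two consecutive longs. A hedged sketch of the writer side (not taken from the Pinot sources; the method name is illustrative) could look like this:

    // Illustrative writer for the creation-meta layout read above: CRC first, then creation time.
    private void saveCreationMeta(File crcFile, long crc, long creationTime) throws IOException {
        try (DataOutputStream dos = new DataOutputStream(new FileOutputStream(crcFile))) {
            dos.writeLong(crc);          // read back by the first ds.readLong()
            dos.writeLong(creationTime); // read back by the second ds.readLong()
        }
    }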
From source file: org.eclipse.gyrex.cloud.internal.queue.Message.java

    /**
     * Creates a new instance.
     *
     * @param zooKeeperQueue
     * @param messageId
     * @param record
     * @param stat
     * @throws IOException
     */
    public Message(final String messageId, final ZooKeeperQueue zooKeeperQueue, final byte[] record, final Stat stat)
            throws IOException {
        this.messageId = messageId;
        this.zooKeeperQueue = zooKeeperQueue;
        queueId = zooKeeperQueue.id;
        zkNodeDataVersion = stat.getVersion();

        final DataInputStream din = new DataInputStream(new ByteArrayInputStream(record));

        // serialized format version
        final int formatVersion = din.readInt();
        if (formatVersion != 1)
            throw new IllegalArgumentException(String
                    .format("invalid record data: version mismatch (expected %d, found %d)", 1, formatVersion));

        // timeout
        invisibleTimeoutTS = din.readLong();

        // body size
        final int length = din.readInt();

        // body
        body = new byte[length];
        final int read = din.read(body);
        if (read != length)
            throw new IllegalArgumentException(
                    String.format("invalid record data: body size mismatch (expected %d, read %d)", length, read));
    }
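The record parsed above has a fixed layout: an int format version, a long invisibility timeout, an int body length, and the body bytes. A sketch of a matching serializer (the method name is illustrative and not part of the Gyrex API) might be:

    // Illustrative serializer for the record layout parsed above.
    static byte[] createRecord(long invisibleTimeoutTS, byte[] body) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream dout = new DataOutputStream(bos);
        dout.writeInt(1);                   // serialized format version
        dout.writeLong(invisibleTimeoutTS); // timeout, read back by din.readLong()
        dout.writeInt(body.length);         // body size
        dout.write(body);                   // body
        dout.flush();
        return bos.toByteArray();
    }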
From source file: org.apache.hadoop.hbase.io.hfile.FixedFileTrailer.java

    /**
     * Deserialize the fixed file trailer from the given stream. The version needs
     * to already be specified. Make sure this is consistent with
     * {@link #serialize(DataOutputStream)}.
     *
     * @param inputStream
     * @throws IOException
     */
    void deserialize(DataInputStream inputStream) throws IOException {
        HFile.checkFormatVersion(majorVersion);

        BlockType.TRAILER.readAndCheck(inputStream);

        fileInfoOffset = inputStream.readLong();
        loadOnOpenDataOffset = inputStream.readLong();
        dataIndexCount = inputStream.readInt();

        if (majorVersion == 1) {
            inputStream.readLong(); // Read and skip metaIndexOffset.
        } else {
            uncompressedDataIndexSize = inputStream.readLong();
        }

        metaIndexCount = inputStream.readInt();

        totalUncompressedBytes = inputStream.readLong();
        entryCount = majorVersion == 1 ? inputStream.readInt() : inputStream.readLong();
        compressionCodec = Compression.Algorithm.values()[inputStream.readInt()];
        if (majorVersion > 1) {
            numDataIndexLevels = inputStream.readInt();
            firstDataBlockOffset = inputStream.readLong();
            lastDataBlockOffset = inputStream.readLong();
            comparatorClassName = Bytes.readStringFixedSize(inputStream, MAX_COMPARATOR_NAME_LENGTH);
        }

        int version = inputStream.readInt();
        expectMajorVersion(extractMajorVersion(version));
        expectMinorVersion(extractMinorVersion(version));
    }
From source file: gridool.util.xfer.TransferUtils.java

    public static void sendfile(@Nonnull final File file, final long fromPos, final long count,
            @Nullable final String writeDirPath, @Nonnull final InetAddress dstAddr, final int dstPort,
            final boolean append, final boolean sync, @Nonnull final TransferClientHandler handler)
            throws IOException {
        if (!file.exists()) {
            throw new IllegalArgumentException(file.getAbsolutePath() + " does not exist");
        }
        if (!file.isFile()) {
            throw new IllegalArgumentException(file.getAbsolutePath() + " is not file");
        }
        if (!file.canRead()) {
            throw new IllegalArgumentException(file.getAbsolutePath() + " cannot read");
        }
        final SocketAddress dstSockAddr = new InetSocketAddress(dstAddr, dstPort);
        SocketChannel channel = null;
        Socket socket = null;
        final OutputStream out;
        try {
            channel = SocketChannel.open();
            socket = channel.socket();
            socket.connect(dstSockAddr);
            out = socket.getOutputStream();
        } catch (IOException e) {
            LOG.error("failed to connect: " + dstSockAddr, e);
            IOUtils.closeQuietly(channel);
            NetUtils.closeQuietly(socket);
            throw e;
        }
        DataInputStream din = null;
        if (sync) {
            InputStream in = socket.getInputStream();
            din = new DataInputStream(in);
        }
        final DataOutputStream dos = new DataOutputStream(out);
        final StopWatch sw = new StopWatch();
        FileInputStream src = null;
        final long nbytes;
        try {
            src = new FileInputStream(file);
            FileChannel fc = src.getChannel();
            String fileName = file.getName();
            IOUtils.writeString(fileName, dos);
            IOUtils.writeString(writeDirPath, dos);
            long xferBytes = (count == -1L) ? fc.size() : count;
            dos.writeLong(xferBytes);
            dos.writeBoolean(append); // append=false
            dos.writeBoolean(sync);
            if (handler == null) {
                dos.writeBoolean(false);
            } else {
                dos.writeBoolean(true);
                handler.writeAdditionalHeader(dos);
            }
            // send file using zero-copy send
            nbytes = fc.transferTo(fromPos, xferBytes, channel);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Sent a file '" + file.getAbsolutePath() + "' of " + nbytes + " bytes to "
                        + dstSockAddr.toString() + " in " + sw.toString());
            }
            if (sync) { // receive ack in sync mode
                long remoteRecieved = din.readLong();
                if (remoteRecieved != xferBytes) {
                    throw new IllegalStateException(
                            "Sent " + xferBytes + " bytes, but remote node received " + remoteRecieved + " bytes");
                }
            }
        } catch (FileNotFoundException e) {
            LOG.error(PrintUtils.prettyPrintStackTrace(e, -1));
            throw e;
        } catch (IOException e) {
            LOG.error(PrintUtils.prettyPrintStackTrace(e, -1));
            throw e;
        } finally {
            IOUtils.closeQuietly(src);
            IOUtils.closeQuietly(din, dos);
            IOUtils.closeQuietly(channel);
            NetUtils.closeQuietly(socket);
        }
    }
From source file: org.gdg.frisbee.android.cache.ModelCache.java

    private long readExpirationFromDisk(InputStream is) throws IOException {
        DataInputStream din = new DataInputStream(is);
        long expiration = din.readLong();
        din.close();
        return expiration;
    }
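The expiration is simply the first eight bytes of the cached entry. A hedged sketch of the corresponding writer (not taken from the ModelCache source; the method name is illustrative) is:

    // Illustrative counterpart: write the expiration timestamp as the first 8 bytes.
    private void writeExpirationToDisk(OutputStream os, long expiration) throws IOException {
        DataOutputStream dout = new DataOutputStream(os);
        dout.writeLong(expiration); // read back by readExpirationFromDisk() via readLong()
        dout.flush();
    }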
From source file: com.epam.catgenome.manager.reference.io.NibDataReader.java

    private void seekBCDISWithIndexFile(final BlockCompressedDataInputStream stream, final DataInputStream index,
            final long positon) throws IOException {
        // correct seek in file
        long seekPosition = positon + stream.getPosition();
        long filePosition = index.readLong();
        long seekPos = index.readLong();
        long helpSeekPosition = seekPosition - seekPos;
        while (helpSeekPosition >= 0) {
            seekPosition = helpSeekPosition;
            filePosition = index.readLong();
            seekPos = index.readLong();
            helpSeekPosition = seekPosition - seekPos;
        }
        stream.seek(filePosition + seekPosition);
    }
From source file: net.sf.gazpachoquest.rest.auth.TokenStore.java

    /**
     * Load the current set of tokens from the token file. If reading the tokens
     * fails or the token file does not exist, tokens will be generated on
     * demand.
     */
    private void loadTokens() {
        if (tokenFile.isFile() && tokenFile.canRead()) {
            FileInputStream fin = null;
            DataInputStream keyInputStream = null;
            try {
                fin = new FileInputStream(tokenFile);
                keyInputStream = new DataInputStream(fin);
                int newCurrentToken = keyInputStream.readInt();
                long newNextUpdate = keyInputStream.readLong();
                SecretKey[] newKeys = new SecretKey[TOKEN_BUFFER_SIZE];
                for (int i = 0; i < newKeys.length; i++) {
                    int isNull = keyInputStream.readInt();
                    if (isNull == 1) {
                        int l = keyInputStream.readInt();
                        byte[] b = new byte[l];
                        keyInputStream.read(b);
                        newKeys[i] = new SecretKeySpec(b, HMAC_SHA1);
                    } else {
                        newKeys[i] = null;
                    }
                }
                // assign the tokens and schedule a next update
                nextUpdate = newNextUpdate;
                currentToken = newCurrentToken;
                currentTokens = newKeys;
            } catch (IOException e) {
                log.error("Failed to load cookie keys " + e.getMessage());
            } finally {
                if (keyInputStream != null) {
                    try {
                        keyInputStream.close();
                    } catch (IOException e) {
                    }
                } else if (fin != null) {
                    try {
                        fin.close();
                    } catch (IOException e) {
                    }
                }
            }
        }
        // if there was a failure to read the current tokens, create new ones
        if (currentTokens == null) {
            currentTokens = new SecretKey[TOKEN_BUFFER_SIZE];
            nextUpdate = System.currentTimeMillis();
            currentToken = 0;
        }
    }
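A hypothetical writer for this token file (not shown in the source; the method name is illustrative) would mirror the layout read above: an int current-token index, a long next-update time, then for each slot an int presence flag followed, when present, by the key length and key bytes:

    // Illustrative writer mirroring the token-file layout read by loadTokens().
    private void saveTokens(File tokenFile, int currentToken, long nextUpdate, SecretKey[] keys)
            throws IOException {
        try (DataOutputStream out = new DataOutputStream(new FileOutputStream(tokenFile))) {
            out.writeInt(currentToken);
            out.writeLong(nextUpdate);      // read back with readLong()
            for (SecretKey key : keys) {
                if (key != null) {
                    byte[] encoded = key.getEncoded();
                    out.writeInt(1);        // marker: key present
                    out.writeInt(encoded.length);
                    out.write(encoded);
                } else {
                    out.writeInt(0);        // marker: key absent
                }
            }
        }
    }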