List of usage examples for java.io DataInputStream readInt
public final int readInt() throws IOException
readInt
method of DataInput
. From source file:org.apache.hadoop.security.SaslRpcClient.java
/** * Do client side SASL authentication with server via the given InputStream * and OutputStream/*w w w .j a v a2 s .com*/ * * @param inS * InputStream to use * @param outS * OutputStream to use * @return true if connection is set up, or false if needs to switch * to simple Auth. * @throws IOException */ public boolean saslConnect(InputStream inS, OutputStream outS) throws IOException { DataInputStream inStream = new DataInputStream(new BufferedInputStream(inS)); DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream(outS)); try { byte[] saslToken = new byte[0]; if (saslClient.hasInitialResponse()) saslToken = saslClient.evaluateChallenge(saslToken); if (saslToken != null) { outStream.writeInt(saslToken.length); outStream.write(saslToken, 0, saslToken.length); outStream.flush(); if (LOG.isDebugEnabled()) LOG.debug("Have sent token of size " + saslToken.length + " from initSASLContext."); } if (!saslClient.isComplete()) { readStatus(inStream); int len = inStream.readInt(); if (len == SaslRpcServer.SWITCH_TO_SIMPLE_AUTH) { if (LOG.isDebugEnabled()) LOG.debug("Server asks us to fall back to simple auth."); saslClient.dispose(); return false; } saslToken = new byte[len]; if (LOG.isDebugEnabled()) LOG.debug("Will read input token of size " + saslToken.length + " for processing by initSASLContext"); inStream.readFully(saslToken); } while (!saslClient.isComplete()) { saslToken = saslClient.evaluateChallenge(saslToken); if (saslToken != null) { if (LOG.isDebugEnabled()) LOG.debug("Will send token of size " + saslToken.length + " from initSASLContext."); outStream.writeInt(saslToken.length); outStream.write(saslToken, 0, saslToken.length); outStream.flush(); } if (!saslClient.isComplete()) { readStatus(inStream); saslToken = new byte[inStream.readInt()]; if (LOG.isDebugEnabled()) LOG.debug("Will read input token of size " + saslToken.length + " for processing by initSASLContext"); inStream.readFully(saslToken); } } if (LOG.isDebugEnabled()) { 
LOG.debug("SASL client context established. Negotiated QoP: " + saslClient.getNegotiatedProperty(Sasl.QOP)); } return true; } catch (IOException e) { try { saslClient.dispose(); } catch (SaslException ignored) { // ignore further exceptions during cleanup } throw e; } }
From source file:ch.unil.genescore.vegas.Snp.java
public void readPosAndAllele(DataInputStream is) throws IOException, DataInconsistencyException { //TODO: Only solves homegrown case atm; // @David I changed this to an IllegalArgumentException because the other one was unknown on my system // id_ is already read //String curChr = chr_; //int curStart = start_; //int curEnd = end_; chr_ = is.readUTF();//from w w w. j a v a2 s. co m start_ = is.readInt(); end_ = is.readInt(); //if (curChr != null || curStart != -1 || curEnd != -1){ // if (!chr_.equals(curChr) || start_ != curStart || end_ != curEnd){ // throw new RuntimeException("snp seems to have been set before to another value"); //} //} posStrand_ = is.readBoolean(); char minorAllele = is.readChar(); boolean snpHasBeenSeenInGWAS = false; if (minorAllele_ != 'N' || majorAllele_ != 'N') { snpHasBeenSeenInGWAS = true; } if (Pascal.set.withZScore_ && minorAllele_ != minorAllele) { if (minorAllele != majorAllele_ && snpHasBeenSeenInGWAS) { throw new DataInconsistencyException( "different minor allele of reference population not found GWAS data. Snp left out."); } zscore_ *= -1; char minorAlleleSummaryFile = minorAllele_; char majorAlleleSummaryFile = majorAllele_; minorAllele_ = majorAlleleSummaryFile; majorAllele_ = minorAlleleSummaryFile; } }
From source file:org.apache.hadoop.hbase.security.HBaseSaslRpcClient.java
/**
 * Do client side SASL authentication with server via the given InputStream
 * and OutputStream.
 *
 * Tokens are framed as a 4-byte length followed by the token bytes.
 *
 * @param inS
 *          InputStream to use
 * @param outS
 *          OutputStream to use
 * @return true if connection is set up, or false if needs to switch
 *         to simple Auth.
 * @throws IOException if the exchange fails, or if the server requests a
 *         fallback to SIMPLE auth while fallback is disallowed
 */
public boolean saslConnect(InputStream inS, OutputStream outS) throws IOException {
    DataInputStream inStream = new DataInputStream(new BufferedInputStream(inS));
    DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream(outS));
    try {
        // Some SASL mechanisms produce an initial token before any challenge.
        byte[] saslToken = new byte[0];
        if (saslClient.hasInitialResponse())
            saslToken = saslClient.evaluateChallenge(saslToken);
        if (saslToken != null) {
            outStream.writeInt(saslToken.length);
            outStream.write(saslToken, 0, saslToken.length);
            outStream.flush();
            if (LOG.isDebugEnabled())
                LOG.debug("Have sent token of size " + saslToken.length + " from initSASLContext.");
        }
        if (!saslClient.isComplete()) {
            readStatus(inStream);
            int len = inStream.readInt();
            // A sentinel length asks the client to drop back to SIMPLE auth,
            // which is honored only when fallback is explicitly allowed.
            if (len == SaslUtil.SWITCH_TO_SIMPLE_AUTH) {
                if (!fallbackAllowed) {
                    throw new IOException("Server asks us to fall back to SIMPLE auth, "
                            + "but this client is configured to only allow secure connections.");
                }
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Server asks us to fall back to simple auth.");
                }
                saslClient.dispose();
                return false;
            }
            saslToken = new byte[len];
            if (LOG.isDebugEnabled())
                LOG.debug("Will read input token of size " + saslToken.length
                        + " for processing by initSASLContext");
            inStream.readFully(saslToken);
        }
        // Alternate challenge/response until the mechanism reports completion.
        while (!saslClient.isComplete()) {
            saslToken = saslClient.evaluateChallenge(saslToken);
            if (saslToken != null) {
                if (LOG.isDebugEnabled())
                    LOG.debug("Will send token of size " + saslToken.length + " from initSASLContext.");
                outStream.writeInt(saslToken.length);
                outStream.write(saslToken, 0, saslToken.length);
                outStream.flush();
            }
            if (!saslClient.isComplete()) {
                readStatus(inStream);
                saslToken = new byte[inStream.readInt()];
                if (LOG.isDebugEnabled())
                    LOG.debug("Will read input token of size " + saslToken.length
                            + " for processing by initSASLContext");
                inStream.readFully(saslToken);
            }
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("SASL client context established. Negotiated QoP: "
                    + saslClient.getNegotiatedProperty(Sasl.QOP));
        }
        return true;
    } catch (IOException e) {
        try {
            saslClient.dispose();
        } catch (SaslException ignored) {
            // ignore further exceptions during cleanup
        }
        throw e;
    }
}
From source file:com.adito.notification.Notifier.java
void loadFromDisk() throws IOException { File[] f = queueDirectory.listFiles(new FileFilter() { public boolean accept(File f) { return f.getName().endsWith(".msg"); }/*from w w w.j a va 2 s . c om*/ }); // TODO better error handling in parsing of message files. Report on // non-existant / unreadable directory if (f == null) { throw new IOException("Could not list queue directory " + queueDirectory.getAbsolutePath()); } for (int i = 0; i < f.length; i++) { FileInputStream fin = new FileInputStream(f[i]); try { DataInputStream din = new DataInputStream(fin); long id = din.readLong(); String sinkName = din.readUTF(); messageId = Math.max(id, messageId); boolean urgent = din.readBoolean(); String subject = din.readUTF(); List<Recipient> recipientList = new ArrayList<Recipient>(); while (true) { int recipientType = din.readInt(); if (recipientType == Recipient.EOF) { break; } else { String recipientAlias = din.readUTF(); String realmName = din.readUTF(); Recipient recipient = new Recipient(recipientType, recipientAlias, realmName); recipientList.add(recipient); } } Properties parameters = new Properties(); while (true) { int parameterType = din.readInt(); if (parameterType < 1) { break; } else { String key = din.readUTF(); String val = din.readUTF(); parameters.setProperty(key, val); } } String content = din.readUTF(); String lastMessage = din.readUTF(); Message msg = new Message(subject, content, urgent); msg.setId(id); msg.setRecipients(recipientList); msg.setSinkName(sinkName); msg.setLastMessage(lastMessage); queue(msg); } finally { fin.close(); } } }
From source file:br.org.indt.ndg.servlets.PostResults.java
private String Decompress(HttpServletRequest request) { DataInputStream dis = null; DataInputStream objIn = null; ByteArrayOutputStream baos = null; String result = null;/*ww w . j a v a 2s . c o m*/ try { dis = new DataInputStream(request.getInputStream()); baos = new ByteArrayOutputStream(); int length, uncomplength = 0; int data = 0; uncomplength = dis.readInt(); length = dis.readInt(); for (int i = 0; i < length; i++) { data = dis.read(); baos.write((byte) data); } ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); ZInputStream zIn = new ZInputStream(bais); objIn = new DataInputStream(zIn); byte[] bytes = new byte[uncomplength]; objIn.readFully(bytes); result = new String(bytes, ENCODING); log.info("Compressed length: " + length + " bytes"); log.info("Decompressed length: " + result.getBytes().length + " bytes"); zIn.close(); dis.close(); baos.close(); objIn.close(); } catch (EOFException e) { servletError = true; log.error(e); } catch (IOException e) { servletError = true; log.error(e); } catch (Exception e) { servletError = true; log.error(e); } return result; }
From source file:org.getspout.spout.packet.PacketAddonData.java
@SuppressWarnings("unchecked") public void readData(DataInputStream input) throws IOException { String id = PacketUtil.readString(input); boolean sandboxed = SpoutClient.isSandboxed(); SpoutClient.enableSandbox();//from www . ja v a 2s . co m try { Class<? extends AddonPacket> packetClass = AddonPacket.getPacketFromId(id); Constructor<? extends AddonPacket> constructor = null; Constructor<? extends AddonPacket>[] constructors = (Constructor<? extends AddonPacket>[]) packetClass .getConstructors(); for (Constructor<? extends AddonPacket> c : constructors) { if (c.getGenericParameterTypes().length == 0) { constructor = c; break; } } packet = constructor.newInstance(); } catch (Exception e) { e.printStackTrace(); } if (!sandboxed) { SpoutClient.disableSandbox(); } int size = input.readInt(); compressed = input.readBoolean(); data = new byte[size]; input.readFully(data); }
From source file:com.ning.arecibo.util.timeline.times.TimelineCoderImpl.java
/**
 * Merges a list of individually encoded timeline chunks into a single
 * encoded timeline byte array.
 *
 * Each chunk is decoded opcode-by-opcode (full 4-byte times, small inline
 * deltas, and byte/short repeated-delta runs, per TimelineOpcode) while the
 * combined stream is re-encoded, coalescing adjacent runs that share the
 * same delta — including runs that straddle chunk boundaries.
 *
 * NOTE(review): chunks are presumably expected in chronological order; a
 * full time earlier than the running lastTime is clamped with a warning.
 *
 * @param timesList the encoded chunks to combine
 * @return the combined encoded timeline, or an empty array if any exception
 *         occurs (the error is logged)
 */
private byte[] combineTimelines(final List<byte[]> timesList) {
    final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    final DataOutputStream dataStream = new DataOutputStream(outputStream);
    try {
        // Running decoder/encoder state, carried across chunks.
        int lastTime = 0;
        int lastDelta = 0;
        int repeatCount = 0;
        int chunkCounter = 0;
        for (byte[] times : timesList) {
            final ByteArrayInputStream byteStream = new ByteArrayInputStream(times);
            final DataInputStream byteDataStream = new DataInputStream(byteStream);
            int byteCursor = 0;
            while (true) {
                // Part 1: Get the opcode, and come up with newTime, newCount and newDelta
                final int opcode = byteDataStream.read();
                if (opcode == -1) {
                    break; // end of this chunk
                }
                byteCursor++;
                int newTime = 0;
                int newCount = 0;
                int newDelta = 0;
                boolean useNewDelta = false;
                boolean nonDeltaTime = false;
                if (opcode == TimelineOpcode.FULL_TIME.getOpcodeIndex()) {
                    // Absolute 4-byte timestamp follows the opcode.
                    newTime = byteDataStream.readInt();
                    if (newTime < lastTime) {
                        // Out-of-order time: clamp rather than emit a regression.
                        log.warn(
                                "In TimelineCoder.combineTimeLines(), the fulltime read is %d, but the lastTime is %d; setting newTime to lastTime",
                                newTime, lastTime);
                        newTime = lastTime;
                    }
                    byteCursor += 4;
                    if (lastTime == 0) {
                        // Very first time seen: emit it directly and reset run state.
                        writeTime(0, newTime, dataStream);
                        lastTime = newTime;
                        lastDelta = 0;
                        repeatCount = 0;
                        continue;
                    } else if (newTime - lastTime <= TimelineOpcode.MAX_DELTA_TIME) {
                        // Close enough to the previous time to re-encode as a delta.
                        newDelta = newTime - lastTime;
                        useNewDelta = true;
                        newCount = 1;
                    } else {
                        nonDeltaTime = true;
                    }
                } else if (opcode <= TimelineOpcode.MAX_DELTA_TIME) {
                    // Small opcode values encode the delta inline.
                    newTime = lastTime + opcode;
                    newDelta = opcode;
                    useNewDelta = true;
                    newCount = 1;
                } else if (opcode == TimelineOpcode.REPEATED_DELTA_TIME_BYTE.getOpcodeIndex()) {
                    // Run-length encoded delta: 1-byte count, 1-byte delta.
                    newCount = byteDataStream.read();
                    newDelta = byteDataStream.read();
                    useNewDelta = true;
                    byteCursor += 2;
                    if (lastTime != 0) {
                        newTime = lastTime + newDelta * newCount;
                    } else {
                        throw new IllegalStateException(String.format(
                                "In TimelineCoder.combineTimelines, lastTime is 0 byte opcode = %d, byteCursor %d, chunkCounter %d, chunk %s",
                                opcode, byteCursor, chunkCounter, new String(Hex.encodeHex(times))));
                    }
                } else if (opcode == TimelineOpcode.REPEATED_DELTA_TIME_SHORT.getOpcodeIndex()) {
                    // Run-length encoded delta: 2-byte count, 1-byte delta.
                    newCount = byteDataStream.readUnsignedShort();
                    newDelta = byteDataStream.read();
                    useNewDelta = true;
                    byteCursor += 3;
                    if (lastTime != 0) {
                        newTime = lastTime + newDelta * newCount;
                    }
                } else {
                    throw new IllegalStateException(String.format(
                            "In TimelineCoder.combineTimelines, Unrecognized byte opcode = %d, byteCursor %d, chunkCounter %d, chunk %s",
                            opcode, byteCursor, chunkCounter, new String(Hex.encodeHex(times))));
                }
                // Part 2: Combine existing state represented in lastTime, lastDelta and repeatCount with newTime, newCount and newDelta
                if (lastTime == 0) {
                    log.error("In combineTimelines(), lastTime is 0; byteCursor %d, chunkCounter %d, times %s",
                            byteCursor, chunkCounter, new String(Hex.encodeHex(times)));
                } else if (repeatCount > 0) {
                    if (lastDelta == newDelta && newCount > 0) {
                        // Same delta as the pending run: extend it.
                        repeatCount += newCount;
                        lastTime = newTime;
                    } else {
                        // Different delta: flush the pending run first.
                        writeRepeatedDelta(lastDelta, repeatCount, dataStream);
                        if (useNewDelta) {
                            lastDelta = newDelta;
                            repeatCount = newCount;
                            lastTime = newTime;
                        } else {
                            writeTime(lastTime, newTime, dataStream);
                            lastTime = newTime;
                            lastDelta = 0;
                            repeatCount = 0;
                        }
                    }
                } else if (nonDeltaTime) {
                    // Gap too large for a delta: emit an absolute time.
                    writeTime(lastTime, newTime, dataStream);
                    lastTime = newTime;
                    lastDelta = 0;
                    repeatCount = 0;
                } else if (lastDelta == 0) {
                    // No pending run: start one with the new delta.
                    lastTime = newTime;
                    repeatCount = newCount;
                    lastDelta = newDelta;
                }
            }
            chunkCounter++;
        }
        // Flush any run still pending after the last chunk.
        if (repeatCount > 0) {
            writeRepeatedDelta(lastDelta, repeatCount, dataStream);
        }
        dataStream.flush();
        return outputStream.toByteArray();
    } catch (Exception e) {
        log.error(e, "In combineTimesLines(), exception combining timelines");
        return new byte[0];
    }
}
From source file:org.apache.sshd.server.sftp.SftpSubsystem.java
public void run() { DataInputStream dis = null; try {// w w w . ja va2 s . co m dis = new DataInputStream(in); while (true) { int length = dis.readInt(); if (length < 5) { throw new IllegalArgumentException(); } Buffer buffer = new Buffer(length + 4); buffer.putInt(length); int nb = length; while (nb > 0) { int l = dis.read(buffer.array(), buffer.wpos(), nb); if (l < 0) { throw new IllegalArgumentException(); } buffer.wpos(buffer.wpos() + l); nb -= l; } process(buffer); } } catch (Throwable t) { if (!closed && !(t instanceof EOFException)) { // Ignore han log.error("Exception caught in SFTP subsystem", t); } } finally { if (dis != null) { try { dis.close(); } catch (IOException ioe) { log.error("Could not close DataInputStream", ioe); } } if (handles != null) { for (Map.Entry<String, Handle> entry : handles.entrySet()) { Handle handle = entry.getValue(); try { handle.close(); } catch (IOException ioe) { log.error("Could not close open handle: " + entry.getKey(), ioe); } } } dis = null; callback.onExit(0); } }
From source file:com.facebook.infrastructure.db.Column.java
/** * We know the name of the column here so just return it. * Filter is pretty much useless in this call and is ignored. *///from w w w. j a v a 2 s . c om public IColumn deserialize(DataInputStream dis, String columnName, IFilter filter) throws IOException { if (dis.available() == 0) return null; IColumn column = null; String name = dis.readUTF(); if (name.equals(columnName)) { column = defreeze(dis, name); if (filter instanceof IdentityFilter) { /* * If this is being called with identity filter * since a column name is passed in we know * that this is a final call * Hence if the column is found set the filter to done * so that we do not look for the column in further files */ IdentityFilter f = (IdentityFilter) filter; f.setDone(); } } else { /* Skip a boolean and the timestamp */ dis.skip(DBConstants.boolSize_ + DBConstants.tsSize_); int size = dis.readInt(); dis.skip(size); } return column; }
From source file:net.timewalker.ffmq4.storage.data.impl.AbstractBlockBasedDataStore.java
private final void loadAllocationTable() throws DataStoreException { log.debug(/*from w w w.j a v a 2s .c o m*/ "[" + descriptor.getName() + "] Loading allocation table " + allocationTableFile.getAbsolutePath()); DataInputStream in = null; try { in = new DataInputStream(new BufferedInputStream(new FileInputStream(allocationTableFile), 16384)); this.blockCount = in.readInt(); this.blockSize = in.readInt(); this.firstBlock = in.readInt(); this.flags = new byte[blockCount]; this.allocatedSize = new int[blockCount]; this.previousBlock = new int[blockCount]; this.nextBlock = new int[blockCount]; this.blocksInUse = 0; int msgCount = 0; for (int n = 0; n < blockCount; n++) { flags[n] = in.readByte(); allocatedSize[n] = in.readInt(); previousBlock[n] = in.readInt(); nextBlock[n] = in.readInt(); if (allocatedSize[n] != -1) { blocksInUse++; if ((flags[n] & FLAG_START_BLOCK) > 0) msgCount++; } } this.locks = new FastBitSet(blockCount); this.size = msgCount; log.debug("[" + descriptor.getName() + "] " + msgCount + " entries found"); } catch (EOFException e) { throw new DataStoreException("Allocation table is truncated : " + allocationTableFile.getAbsolutePath(), e); } catch (IOException e) { throw new DataStoreException( "Cannot initialize allocation table : " + allocationTableFile.getAbsolutePath(), e); } finally { if (in != null) { try { in.close(); } catch (IOException e) { log.error("[" + descriptor.getName() + "] Could not close file input stream", e); } } } }