List of usage examples for java.io DataInputStream readShort
public final short readShort() throws IOException
readShort is declared by the java.io.DataInput interface and implemented by DataInputStream. It reads two input bytes, interprets them as a big-endian (high byte first) 16-bit value, and returns the result as a signed short.
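As a minimal, self-contained sketch (the class name and the written values below are illustrative, not taken from the examples on this page), the call pattern shared by all of the snippets is the same: wrap an InputStream in a DataInputStream and call readShort() to consume the next two big-endian bytes.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ReadShortDemo {
    public static void main(String[] args) throws IOException {
        // Write two 16-bit values with DataOutputStream ...
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        out.writeShort(19);      // e.g. a protocol version field
        out.writeShort(0xCAFE);  // only the low 16 bits are written
        out.close();

        // ... then read them back; each readShort() call consumes exactly two bytes.
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        short version = in.readShort();  // 19
        short marker = in.readShort();   // -13570, i.e. 0xCAFE reinterpreted as a signed short
        System.out.println(version + " " + marker);
        in.close();
    }
}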
From source file:org.apache.jxtadoop.hdfs.server.datanode.DataXceiver.java
/**
 * Read/write data from/to the DataXceiveServer.
 */
public void run() {
    DataInputStream in = null;
    LOG.debug("DataXceiver starts processing new incoming data");
    try {
        //in = new DataInputStream(
        //    new BufferedInputStream(NetUtils.getInputStream(s),
        //        SMALL_BUFFER_SIZE));
        LOG.debug("Reading version from stream");
        LOG.debug("DataXceiver socket connected : " + s.isConnected());
        LOG.debug("DataXceiver socket closed : " + s.isClosed());

        ReliableInputStream ris = (ReliableInputStream) s.getInputStream();
        BufferedInputStream bis = new BufferedInputStream(ris);
        in = new DataInputStream(bis);

        short version = in.readShort();
        LOG.debug("Version read : " + version);
        if (version != DataTransferProtocol.DATA_TRANSFER_VERSION) {
            throw new IOException("Version Mismatch");
        }

        //boolean local = s.getInetAddress().equals(s.getLocalAddress());
        boolean local = false; /** TODO A modifier proprement **/

        LOG.debug("Reading op type from stream");
        byte op = in.readByte();
        LOG.debug("op type read : " + op);

        // Make sure the xciver count is not exceeded
        int curXceiverCount = datanode.getXceiverCount();
        if (curXceiverCount > dataXceiverServer.maxXceiverCount) {
            throw new IOException("xceiverCount " + curXceiverCount
                    + " exceeds the limit of concurrent xcievers " + dataXceiverServer.maxXceiverCount);
        }

        long startTime = DataNode.now();
        switch (op) {
        case DataTransferProtocol.OP_READ_BLOCK:
            LOG.debug("Received a OP_READ_BLOCK op");
            readBlock(in);
            datanode.myMetrics.readBlockOp.inc(DataNode.now() - startTime);
            if (local)
                datanode.myMetrics.readsFromLocalClient.inc();
            else
                datanode.myMetrics.readsFromRemoteClient.inc();
            break;
        case DataTransferProtocol.OP_WRITE_BLOCK:
            LOG.debug("Received a OP_WRITE_BLOCK op");
            writeBlock(in);
            datanode.myMetrics.writeBlockOp.inc(DataNode.now() - startTime);
            if (local)
                datanode.myMetrics.writesFromLocalClient.inc();
            else
                datanode.myMetrics.writesFromRemoteClient.inc();
            break;
        case DataTransferProtocol.OP_READ_METADATA:
            LOG.debug("Received a OP_READ_METADATA op");
            readMetadata(in);
            datanode.myMetrics.readMetadataOp.inc(DataNode.now() - startTime);
            break;
        case DataTransferProtocol.OP_REPLACE_BLOCK: // for balancing purpose; send to a destination
            LOG.debug("Received a OP_REPLACE_BLOCK op");
            replaceBlock(in);
            datanode.myMetrics.replaceBlockOp.inc(DataNode.now() - startTime);
            break;
        case DataTransferProtocol.OP_COPY_BLOCK: // for balancing purpose; send to a proxy source
            LOG.debug("Received a OP_COPY_BLOCK op");
            copyBlock(in);
            datanode.myMetrics.copyBlockOp.inc(DataNode.now() - startTime);
            break;
        case DataTransferProtocol.OP_BLOCK_CHECKSUM: // get the checksum of a block
            LOG.debug("Received a OP_BLOCK_CHECKSUM op");
            getBlockChecksum(in);
            datanode.myMetrics.blockChecksumOp.inc(DataNode.now() - startTime);
            break;
        default:
            LOG.debug("Unknown op code");
            throw new IOException("Unknown opcode " + op + " in data stream");
        }

        IOUtils.closeStream(in);
        IOUtils.closeSocket(s);
    } catch (SocketTimeoutException ste) {
        LOG.debug("Time out while receiving data on DataXceiver");
        LOG.debug(ste);
        ste.printStackTrace();
    } catch (Exception t) {
        LOG.error(datanode.dnRegistration + ":DataXceiver FAILED", t);
        t.printStackTrace();
    } finally {
        LOG.debug(datanode.dnRegistration + ":Number of active connections is: " + datanode.getXceiverCount());
        // IOUtils.closeStream(in);
        // IOUtils.closeSocket(s);
        dataXceiverServer.childSockets.remove(s);
        s = null;
    }
}
From source file:ClassFile.java
public boolean read(DataInputStream di, ConstantPoolInfo pool[]) throws IOException {
    int count;

    accessFlags = di.readShort();
    name = pool[di.readShort()];
    signature = pool[di.readShort()];
    count = di.readShort();
    if (count != 0) {
        attributes = new AttributeInfo[count];
        for (int i = 0; i < count; i++) {
            attributes[i] = new AttributeInfo();
            if (!attributes[i].read(di, pool))
                return (false);
        }
    }
    return (true);
}
From source file:ClassFile.java
/**
 * Read a method_info from the data stream.
 */
public boolean read(DataInputStream di, ConstantPoolInfo pool[]) throws IOException {
    int count;

    accessFlags = di.readShort();
    name = pool[di.readShort()];
    signature = pool[di.readShort()];
    count = di.readShort();
    if (count != 0) {
        attributes = new AttributeInfo[count];
        for (int i = 0; i < count; i++) {
            attributes[i] = new AttributeInfo();
            // "code"
            if (!attributes[i].read(di, pool)) {
                return (false);
            }
        }
    }
    return (true);
}
From source file:ClassFile.java
public boolean read(DataInputStream dis) throws IOException {
    int len;
    char c;

    type = dis.readByte();
    switch (type) {
    case CLASS:
        name = "Class";
        index1 = dis.readShort();
        index2 = -1;
        break;
    case FIELDREF:
        name = "Field Reference";
        index1 = dis.readShort();
        index2 = dis.readShort();
        break;
    case METHODREF:
        name = "Method Reference";
        index1 = dis.readShort();
        index2 = dis.readShort();
        break;
    case INTERFACE:
        name = "Interface Method Reference";
        index1 = dis.readShort();
        index2 = dis.readShort();
        break;
    case NAMEANDTYPE:
        name = "Name and Type";
        index1 = dis.readShort();
        index2 = dis.readShort();
        break;
    case STRING:
        name = "String";
        index1 = dis.readShort();
        index2 = -1;
        break;
    case INTEGER:
        name = "Integer";
        intValue = dis.readInt();
        break;
    case FLOAT:
        name = "Float";
        floatValue = dis.readFloat();
        break;
    case LONG:
        name = "Long";
        longValue = dis.readLong();
        break;
    case DOUBLE:
        name = "Double";
        doubleValue = dis.readDouble();
        break;
    case ASCIZ:
    case UNICODE:
        if (type == ASCIZ)
            name = "ASCIZ";
        else
            name = "UNICODE";
        StringBuffer xxBuf = new StringBuffer();
        len = dis.readShort();
        while (len > 0) {
            c = (char) (dis.readByte());
            xxBuf.append(c);
            len--;
        }
        strValue = xxBuf.toString();
        break;
    default:
        System.out.println("Warning bad type.");
    }
    return (true);
}
From source file:ClassFile.java
/**
 * Read a class from InputStream <i>in</i>.
 */
public boolean read(InputStream in) throws IOException {
    DataInputStream di = new DataInputStream(in);
    int count;

    magic = di.readInt();
    if (magic != (int) 0xCAFEBABE) {
        return (false);
    }

    majorVersion = di.readShort();
    minorVersion = di.readShort();
    count = di.readShort();
    constantPool = new ConstantPoolInfo[count];
    if (debug)
        System.out.println("read(): Read header...");
    constantPool[0] = new ConstantPoolInfo();
    for (int i = 1; i < constantPool.length; i++) {
        constantPool[i] = new ConstantPoolInfo();
        if (!constantPool[i].read(di)) {
            return (false);
        }
        // These two types take up "two" spots in the table
        if ((constantPool[i].type == ConstantPoolInfo.LONG)
                || (constantPool[i].type == ConstantPoolInfo.DOUBLE))
            i++;
    }

    /*
     * Update pointers in the constant table. This turns the
     * table into a real datastructure.
     *
     * TODO: Have it verify that the right arguments are present
     */
    for (int i = 1; i < constantPool.length; i++) {
        if (constantPool[i] == null)
            continue;
        if (constantPool[i].index1 > 0)
            constantPool[i].arg1 = constantPool[constantPool[i].index1];
        if (constantPool[i].index2 > 0)
            constantPool[i].arg2 = constantPool[constantPool[i].index2];
    }

    if (dumpConstants) {
        for (int i = 1; i < constantPool.length; i++) {
            System.out.println("C" + i + " - " + constantPool[i]);
        }
    }

    accessFlags = di.readShort();
    thisClass = constantPool[di.readShort()];
    superClass = constantPool[di.readShort()];
    if (debug)
        System.out.println("read(): Read class info...");

    /*
     * Identify all of the interfaces implemented by this class
     */
    count = di.readShort();
    if (count != 0) {
        if (debug)
            System.out.println("Class implements " + count + " interfaces.");
        interfaces = new ConstantPoolInfo[count];
        for (int i = 0; i < count; i++) {
            int iindex = di.readShort();
            if ((iindex < 1) || (iindex > constantPool.length - 1))
                return (false);
            interfaces[i] = constantPool[iindex];
            if (debug)
                System.out.println("I" + i + ": " + interfaces[i]);
        }
    }
    if (debug)
        System.out.println("read(): Read interface info...");

    /*
     * Identify all fields in this class.
     */
    count = di.readShort();
    if (debug)
        System.out.println("This class has " + count + " fields.");
    if (count != 0) {
        fields = new FieldInfo[count];
        for (int i = 0; i < count; i++) {
            fields[i] = new FieldInfo();
            if (!fields[i].read(di, constantPool)) {
                return (false);
            }
            if (debug)
                System.out.println("F" + i + ": " + fields[i].toString(constantPool));
        }
    }
    if (debug)
        System.out.println("read(): Read field info...");

    /*
     * Identify all the methods in this class.
     */
    count = di.readShort();
    if (count != 0) {
        methods = new MethodInfo[count];
        for (int i = 0; i < count; i++) {
            methods[i] = new MethodInfo();
            if (!methods[i].read(di, constantPool)) {
                return (false);
            }
            if (debug)
                System.out.println("M" + i + ": " + methods[i].toString());
        }
    }
    if (debug)
        System.out.println("read(): Read method info...");

    /*
     * Identify all of the attributes in this class
     */
    count = di.readShort();
    if (count != 0) {
        attributes = new AttributeInfo[count];
        for (int i = 0; i < count; i++) {
            attributes[i] = new AttributeInfo();
            if (!attributes[i].read(di, constantPool)) {
                return (false);
            }
        }
    }
    if (debug) {
        System.out.println("read(): Read attribute info...");
        System.out.println("done.");
    }
    isValidClass = true;
    return (true);
}
From source file:org.apache.hadoop.hdfs.server.datanode.DataXceiver.java
/**
 * Receive a block and write it to disk, it then notifies the namenode to
 * remove the copy from the source.
 *
 * @param in The stream to read from
 * @throws IOException
 */
private void replaceBlock(DataInputStream in) throws IOException {
    /* read header */
    long blockId = in.readLong();
    Block block = new Block(blockId, dataXceiverServer.estimateBlockSize, in.readLong()); // block id & generation stamp
    String sourceID = Text.readString(in); // read del hint
    DatanodeInfo proxySource = new DatanodeInfo(); // read proxy source
    proxySource.readFields(in);
    Token<BlockTokenIdentifier> accessToken = new Token<BlockTokenIdentifier>();
    accessToken.readFields(in);
    if (datanode.isBlockTokenEnabled) {
        try {
            datanode.blockTokenSecretManager.checkAccess(accessToken, null, block,
                    BlockTokenSecretManager.AccessMode.REPLACE);
        } catch (InvalidToken e) {
            LOG.warn("Invalid access token in request from " + remoteAddress
                    + " for OP_REPLACE_BLOCK for block " + block);
            sendResponse(s, (short) DataTransferProtocol.OP_STATUS_ERROR_ACCESS_TOKEN,
                    datanode.socketWriteTimeout);
            return;
        }
    }

    if (!dataXceiverServer.balanceThrottler.acquire()) { // not able to start
        LOG.warn("Not able to receive block " + blockId + " from " + s.getRemoteSocketAddress()
                + " because threads quota is exceeded.");
        sendResponse(s, (short) DataTransferProtocol.OP_STATUS_ERROR, datanode.socketWriteTimeout);
        return;
    }

    Socket proxySock = null;
    DataOutputStream proxyOut = null;
    short opStatus = DataTransferProtocol.OP_STATUS_SUCCESS;
    BlockReceiver blockReceiver = null;
    DataInputStream proxyReply = null;

    try {
        // get the output stream to the proxy
        InetSocketAddress proxyAddr = NetUtils.createSocketAddr(proxySource.getName());
        proxySock = datanode.newSocket();
        NetUtils.connect(proxySock, proxyAddr, datanode.socketTimeout);
        proxySock.setSoTimeout(datanode.socketTimeout);

        OutputStream baseStream = NetUtils.getOutputStream(proxySock, datanode.socketWriteTimeout);
        proxyOut = new DataOutputStream(new BufferedOutputStream(baseStream, SMALL_BUFFER_SIZE));

        /* send request to the proxy */
        proxyOut.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION); // transfer version
        proxyOut.writeByte(DataTransferProtocol.OP_COPY_BLOCK); // op code
        proxyOut.writeLong(block.getBlockId()); // block id
        proxyOut.writeLong(block.getGenerationStamp()); // block id
        accessToken.write(proxyOut);
        proxyOut.flush();

        // receive the response from the proxy
        proxyReply = new DataInputStream(
                new BufferedInputStream(NetUtils.getInputStream(proxySock), BUFFER_SIZE));
        short status = proxyReply.readShort();
        if (status != DataTransferProtocol.OP_STATUS_SUCCESS) {
            if (status == DataTransferProtocol.OP_STATUS_ERROR_ACCESS_TOKEN) {
                throw new IOException("Copy block " + block + " from " + proxySock.getRemoteSocketAddress()
                        + " failed due to access token error");
            }
            throw new IOException(
                    "Copy block " + block + " from " + proxySock.getRemoteSocketAddress() + " failed");
        }
        // open a block receiver and check if the block does not exist
        blockReceiver = new BlockReceiver(block, proxyReply, proxySock.getRemoteSocketAddress().toString(),
                proxySock.getLocalSocketAddress().toString(), false, "", null, datanode);

        // receive a block
        blockReceiver.receiveBlock(null, null, null, null, dataXceiverServer.balanceThrottler, -1);

        // notify name node
        datanode.notifyNamenodeReceivedBlock(block, sourceID);

        LOG.info("Moved block " + block + " from " + s.getRemoteSocketAddress());
    } catch (IOException ioe) {
        opStatus = DataTransferProtocol.OP_STATUS_ERROR;
        throw ioe;
    } finally {
        // receive the last byte that indicates the proxy released its thread resource
        if (opStatus == DataTransferProtocol.OP_STATUS_SUCCESS) {
            try {
                proxyReply.readChar();
            } catch (IOException ignored) {
            }
        }

        // now release the thread resource
        dataXceiverServer.balanceThrottler.release();

        // send response back
        try {
            sendResponse(s, opStatus, datanode.socketWriteTimeout);
        } catch (IOException ioe) {
            LOG.warn("Error writing reply back to " + s.getRemoteSocketAddress());
        }
        IOUtils.closeStream(proxyOut);
        IOUtils.closeStream(blockReceiver);
        IOUtils.closeStream(proxyReply);
    }
}
From source file:net.sergetk.mobile.lcdui.BitmapFont.java
/**
 * Creates a new font from the resource. The capacity of the color cache defines maximum size of
 * the color cache.
 *
 * @param fontPath
 *            the resource name
 * @param colorCacheCapacity
 *            the maximum color cache size
 */
public BitmapFont(String fontPath, int colorCacheCapacity) {
    this.style = Font.STYLE_PLAIN;
    this.currentColor = 0;
    this.colorCache = new CacheEntry[colorCacheCapacity];
    this.colorUsageCounts = new IntHashMap(colorCacheCapacity * 2);

    try {
        InputStream input = new Object().getClass().getResourceAsStream(fontPath);
        if (input == null) {
            throw new IOException();
        }
        DataInputStream data = new DataInputStream(input);

        int streamLen = data.available();

        this.fontFilePath = fontPath;

        this.version = data.readByte();
        this.height = data.readByte();
        this.baseline = data.readByte();
        this.xIndent = data.readByte();
        this.yIndent = data.readByte();
        this.spaceWidth = data.readByte();

        characterMap = data.readUTF();
        int count = characterMap.length();

        // read characters widthes
        this.widths = new int[count];
        this.x = new int[count];
        this.y = new int[count];
        for (int i = 0; i < count; i++) {
            widths[i] = data.readByte();
        }

        baseImage = null;

        // the original implementation supported multiple-images
        // in the font file, but this is not necessary. Because I do
        // not want to change the encoding, I am leaving this byte that
        // used to represent the number of PNGs in the file
        data.skipBytes(1);

        short pngLen = data.readShort();
        byte[] buffer = new byte[pngLen];
        data.read(buffer, 0, pngLen);
        this.pngOffset = (short) (streamLen - pngLen);
        baseImage = Image.createImage(buffer, 0, pngLen);
        currentImage = baseImage;

        // calculate characters coordinates
        int curX = 0, curY = 0;
        for (int i = 0; i < count; i++) {
            if (widths[i] < 0) {
                // negative width points to another character
                int sourceIndex = -widths[i];
                widths[i] = widths[sourceIndex];
                x[i] = x[sourceIndex];
                y[i] = y[sourceIndex];
            } else {
                x[i] = curX;
                y[i] = curY;
                curX += widths[i];
            }
        }

        if (defaultFont == null)
            defaultFont = this;
    } catch (IOException e) {
        // Log.warn("IOException reading font: ", e);
        System.err.println("IOException reading font: " + e.getMessage());
        e.printStackTrace();
    }
}
From source file:ClassFile.java
/**
 * Write out a text version of this class.
 */
public void display(PrintStream ps) throws Exception {
    int i;
    String myClassName;
    String mySuperClassName;
    String packageName = null;

    if (!isValidClass) {
        ps.println("Not a valid class");
    }
    myClassName = printClassName(thisClass.arg1.strValue);
    mySuperClassName = printClassName(superClass.arg1.strValue);
    if (myClassName.indexOf('.') > 0) {
        packageName = myClassName.substring(0, myClassName.lastIndexOf('.'));
        myClassName = myClassName.substring(myClassName.lastIndexOf('.') + 1);
        ps.println("package " + packageName + "\n");
    }
    for (i = 1; i < constantPool.length; i++) {
        if (constantPool[i] == null)
            continue;
        if ((constantPool[i] == thisClass) || (constantPool[i] == superClass))
            continue;
        if (constantPool[i].type == ConstantPoolInfo.CLASS) {
            String s = constantPool[i].arg1.strValue;
            if (s.charAt(0) == '[')
                continue;
            s = printClassName(constantPool[i].arg1.strValue);
            if ((packageName != null) && (s.startsWith(packageName)))
                continue;
            ps.println("import " + printClassName(s) + ";");
        }
    }
    ps.println();
    ps.println("/*");

    DataInputStream dis;
    ConstantPoolInfo cpi;

    if (attributes != null) {
        ps.println(" * This class has " + attributes.length + " optional class attributes.");
        ps.println(" * These attributes are: ");
        for (i = 0; i < attributes.length; i++) {
            String attrName = attributes[i].name.strValue;
            dis = new DataInputStream(new ByteArrayInputStream(attributes[i].data));
            ps.println(" * Attribute " + (i + 1) + " is of type " + attributes[i].name);
            if (attrName.compareTo("SourceFile") == 0) {
                cpi = null;
                try {
                    cpi = constantPool[dis.readShort()];
                } catch (IOException e) {
                }
                ps.println(" * SourceFile : " + cpi);
            } else {
                ps.println(" * TYPE (" + attrName + ")");
            }
        }
    } else {
        ps.println(" * This class has NO optional class attributes.");
    }
    ps.println(" */\n");

    ps.print(accessString(accessFlags) + "class " + myClassName + " extends " + mySuperClassName);
    if (interfaces != null) {
        ps.print(" implements ");
        for (i = 0; i < interfaces.length - 1; i++) {
            ps.print(interfaces[i].arg1.strValue + ", ");
        }
        ps.print(interfaces[interfaces.length - 1].arg1.strValue);
    }
    ps.println(" {\n");
    if (fields != null) {
        ps.println("/* Instance Variables */");
        for (i = 0; i < fields.length; i++) {
            ps.println(" " + fields[i].toString(constantPool) + ";");
        }
    }
    if (methods != null) {
        ps.println("\n/* Methods */");
        for (i = 0; i < methods.length; i++) {
            ps.println(" " + methods[i].toString(myClassName));
        }
    }
    ps.println("\n}");
}
From source file:org.apache.hadoop.dfs.DataNode.java
private static void receiveResponse(Socket s, int numTargets) throws IOException {
    // check the response
    DataInputStream reply = new DataInputStream(
            new BufferedInputStream(NetUtils.getInputStream(s), BUFFER_SIZE));
    try {
        for (int i = 0; i < numTargets; i++) {
            short opStatus = reply.readShort();
            if (opStatus != OP_STATUS_SUCCESS) {
                throw new IOException("operation failed at " + s.getInetAddress());
            }
        }
    } finally {
        IOUtils.closeStream(reply);
    }
}
From source file:org.apache.jxtadoop.hdfs.server.datanode.DataXceiver.java
/**
 * Read a block from the disk.
 * @param in The stream to read from
 * @throws IOException
 */
private void readBlock(DataInputStream in) throws IOException {
    LOG.debug("Mathod called : readBlock()");
    //
    // Read in the header
    //
    long blockId = in.readLong();
    Block block = new Block(blockId, 0, in.readLong());

    long startOffset = in.readLong();
    long length = in.readLong();
    String clientName = Text.readString(in);

    // send the block
    // OutputStream baseStream = NetUtils.getOutputStream(s, datanode.socketWriteTimeout);
    OutputStream baseStream = s.getOutputStream();
    // DataOutputStream out = new DataOutputStream(
    //     new BufferedOutputStream(baseStream, SMALL_BUFFER_SIZE));
    DataOutputStream out = new DataOutputStream(new BufferedOutputStream(baseStream));

    BlockSender blockSender = null;
    final String clientTraceFmt = clientName.length() > 0 && ClientTraceLog.isInfoEnabled()
            ? String.format(DN_CLIENTTRACE_FORMAT, localAddress, remoteAddress, "%d", "HDFS_READ",
                    clientName, datanode.dnRegistration.getStorageID(), block)
            : datanode.dnRegistration + " Served block " + block + " to " + s.getInetAddress();

    try {
        try {
            blockSender = new BlockSender(block, startOffset, length, true, true, false, datanode,
                    clientTraceFmt);
        } catch (IOException e) {
            out.writeShort(DataTransferProtocol.OP_STATUS_ERROR);
            throw e;
        }

        out.writeShort(DataTransferProtocol.OP_STATUS_SUCCESS); // send op status
        long read = blockSender.sendBlock(out, baseStream, null); // send data

        if (blockSender.isBlockReadFully()) {
            // See if client verification succeeded.
            // This is an optional response from client.
            try {
                if (in.readShort() == DataTransferProtocol.OP_STATUS_CHECKSUM_OK
                        && datanode.blockScanner != null) {
                    datanode.blockScanner.verifiedByClient(block);
                }
            } catch (IOException ignored) {
            }
        }

        datanode.myMetrics.bytesRead.inc((int) read);
        datanode.myMetrics.blocksRead.inc();
    } catch (SocketException ignored) {
        // Its ok for remote side to close the connection anytime.
        datanode.myMetrics.blocksRead.inc();
    } catch (IOException ioe) {
        /* What exactly should we do here?
         * Earlier version shutdown() datanode if there is disk error.
         */
        LOG.warn(datanode.dnRegistration + ":Got exception while serving " + block + " to "
                + s.getInetAddress() + ":\n" + StringUtils.stringifyException(ioe));
        throw ioe;
    } finally {
        LOG.debug("Finalizing : readBlock()");
        IOUtils.closeStream(out);
        IOUtils.closeStream(blockSender);
    }
}