List of usage examples for java.io.DataOutputStream.writeInt
public final void writeInt(int v) throws IOException
Writes an int to the underlying output stream as four bytes, high byte first (big-endian).
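Before the real-world examples below, here is a minimal, self-contained sketch (not taken from any of the sources that follow; the WriteIntDemo class name is illustrative) showing the four-byte, high-byte-first layout that writeInt produces:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteIntDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos);
        dos.writeInt(0x12345678);
        dos.flush();
        // Prints "12 34 56 78": the high byte comes out first.
        for (byte b : baos.toByteArray()) {
            System.out.printf("%02x ", b);
        }
        System.out.println();
    }
}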
From source file: org.squidy.nodes.iPhone.java
/**
 * @param image
 */
public void showButton(int x, int y, int actionType, BufferedImage image) {
    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ImageIO.write(image, "png", baos);
        byte[] bytes = baos.toByteArray();
        System.out.println("BYTES TO READ: " + bytes.length);
        for (Socket client : outputStreams.keySet()) {
            try {
                DataOutputStream outputStream = outputStreams.get(client);
                outputStream.writeInt(x);
                outputStream.writeInt(y);
                outputStream.writeInt(actionType);
                outputStream.writeInt(bytes.length);
                outputStream.write(bytes);
                outputStream.flush();
            } catch (SocketException e) {
                outputStreams.remove(client);
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
        publishFailure(e);
    }
}
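The method above sends a simple length-prefixed frame per client: x, y, actionType, the byte count written with writeInt, then the PNG bytes. As a hedged sketch, a receiver could read that frame back with the matching DataInputStream calls (the ButtonFrameReader class is illustrative and not part of the Squidy source):

import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import javax.imageio.ImageIO;

public class ButtonFrameReader {
    public static void readFrame(DataInputStream in) throws IOException {
        int x = in.readInt();
        int y = in.readInt();
        int actionType = in.readInt();
        int length = in.readInt();       // byte count written with writeInt(bytes.length)
        byte[] bytes = new byte[length];
        in.readFully(bytes);             // read exactly 'length' bytes
        BufferedImage image = ImageIO.read(new ByteArrayInputStream(bytes));
        System.out.println("Received button at " + x + "," + y + " action=" + actionType
                + " image=" + (image == null ? "null" : image.getWidth() + "x" + image.getHeight()));
    }
}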
From source file: com.linkedin.pinot.core.common.datatable.DataTableImplV2.java
private byte[] serializeDictionaryMap() throws IOException {
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    DataOutputStream dataOutputStream = new DataOutputStream(byteArrayOutputStream);
    dataOutputStream.writeInt(_dictionaryMap.size());
    for (Entry<String, Map<Integer, String>> dictionaryMapEntry : _dictionaryMap.entrySet()) {
        String columnName = dictionaryMapEntry.getKey();
        Map<Integer, String> dictionary = dictionaryMapEntry.getValue();
        byte[] bytes = columnName.getBytes(UTF_8);
        dataOutputStream.writeInt(bytes.length);
        dataOutputStream.write(bytes);
        dataOutputStream.writeInt(dictionary.size());
        for (Entry<Integer, String> dictionaryEntry : dictionary.entrySet()) {
            dataOutputStream.writeInt(dictionaryEntry.getKey());
            byte[] valueBytes = dictionaryEntry.getValue().getBytes(UTF_8);
            dataOutputStream.writeInt(valueBytes.length);
            dataOutputStream.write(valueBytes);
        }
    }
    return byteArrayOutputStream.toByteArray();
}
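Every variable-length field above is preceded by a writeInt length or count. A sketch of reading that layout back (count, then per-column name length and bytes, dictionary size, and per-entry key, value length, value bytes); this is an illustration only, not Pinot's own deserializer:

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

public class DictionaryMapReader {
    public static Map<String, Map<Integer, String>> deserialize(byte[] data) throws IOException {
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
        int numColumns = in.readInt();
        Map<String, Map<Integer, String>> dictionaryMap = new HashMap<>(numColumns);
        for (int i = 0; i < numColumns; i++) {
            byte[] nameBytes = new byte[in.readInt()];
            in.readFully(nameBytes);
            String columnName = new String(nameBytes, StandardCharsets.UTF_8);
            int dictionarySize = in.readInt();
            Map<Integer, String> dictionary = new HashMap<>(dictionarySize);
            for (int j = 0; j < dictionarySize; j++) {
                int key = in.readInt();
                byte[] valueBytes = new byte[in.readInt()];
                in.readFully(valueBytes);
                dictionary.put(key, new String(valueBytes, StandardCharsets.UTF_8));
            }
            dictionaryMap.put(columnName, dictionary);
        }
        return dictionaryMap;
    }
}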
From source file: org.apache.hadoop.hbase.security.HBaseSaslRpcClient.java
/**
 * Do client side SASL authentication with server via the given InputStream
 * and OutputStream
 *
 * @param inS
 *          InputStream to use
 * @param outS
 *          OutputStream to use
 * @return true if connection is set up, or false if needs to switch
 *         to simple Auth.
 * @throws IOException
 */
public boolean saslConnect(InputStream inS, OutputStream outS) throws IOException {
    DataInputStream inStream = new DataInputStream(new BufferedInputStream(inS));
    DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream(outS));
    try {
        byte[] saslToken = new byte[0];
        if (saslClient.hasInitialResponse())
            saslToken = saslClient.evaluateChallenge(saslToken);
        if (saslToken != null) {
            outStream.writeInt(saslToken.length);
            outStream.write(saslToken, 0, saslToken.length);
            outStream.flush();
            if (LOG.isDebugEnabled())
                LOG.debug("Have sent token of size " + saslToken.length + " from initSASLContext.");
        }
        if (!saslClient.isComplete()) {
            readStatus(inStream);
            int len = inStream.readInt();
            if (len == SaslUtil.SWITCH_TO_SIMPLE_AUTH) {
                if (!fallbackAllowed) {
                    throw new IOException("Server asks us to fall back to SIMPLE auth, "
                            + "but this client is configured to only allow secure connections.");
                }
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Server asks us to fall back to simple auth.");
                }
                saslClient.dispose();
                return false;
            }
            saslToken = new byte[len];
            if (LOG.isDebugEnabled())
                LOG.debug("Will read input token of size " + saslToken.length
                        + " for processing by initSASLContext");
            inStream.readFully(saslToken);
        }
        while (!saslClient.isComplete()) {
            saslToken = saslClient.evaluateChallenge(saslToken);
            if (saslToken != null) {
                if (LOG.isDebugEnabled())
                    LOG.debug("Will send token of size " + saslToken.length + " from initSASLContext.");
                outStream.writeInt(saslToken.length);
                outStream.write(saslToken, 0, saslToken.length);
                outStream.flush();
            }
            if (!saslClient.isComplete()) {
                readStatus(inStream);
                saslToken = new byte[inStream.readInt()];
                if (LOG.isDebugEnabled())
                    LOG.debug("Will read input token of size " + saslToken.length
                            + " for processing by initSASLContext");
                inStream.readFully(saslToken);
            }
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("SASL client context established. Negotiated QoP: "
                    + saslClient.getNegotiatedProperty(Sasl.QOP));
        }
        return true;
    } catch (IOException e) {
        try {
            saslClient.dispose();
        } catch (SaslException ignored) {
            // ignore further exceptions during cleanup
        }
        throw e;
    }
}
From source file: org.apache.hadoop.hdfs.tools.offlineImageViewer.TestOfflineImageViewer.java
private void changeLayoutVersion(File src, File dest, int newVersion) throws IOException {
    DataInputStream in = null;
    DataOutputStream out = null;
    try {
        in = new DataInputStream(new FileInputStream(src));
        out = new DataOutputStream(new FileOutputStream(dest));
        in.readInt();
        out.writeInt(newVersion);
        byte[] b = new byte[1024];
        int bytesRead;
        // Copy only the bytes actually read, so a short final read does not append stale buffer contents.
        while ((bytesRead = in.read(b)) > 0) {
            out.write(b, 0, bytesRead);
        }
    } finally {
        if (in != null)
            in.close();
        if (out != null)
            out.close();
    }
}
From source file: org.outerrim.snippad.ui.swt.dnd.WikiTransfer.java
/**
 * Writes the given WikiWord to the output stream.
 *
 * @param word
 *            The word to write
 * @param dataOut
 *            The stream to write to
 * @throws IOException
 */
private void writeWikiWord(final WikiWord word, final DataOutputStream dataOut) throws IOException {
    /*
     * Serialization format is as follows:
     *   (String) name of the word
     *   (String) wiki text
     *   (int) number of child words
     *   (WikiWord) child 1 ... repeat for each child
     */
    dataOut.writeUTF(word.getName());
    dataOut.writeUTF(word.getWikiText());
    List children = word.getWikiWords();
    LOG.debug("Word has " + children.size() + " children");
    dataOut.writeInt(children.size());
    for (int i = 0, size = children.size(); i < size; ++i) {
        writeWikiWord((WikiWord) children.get(i), dataOut);
    }
}
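Because the child count is written with writeInt before the children themselves, the format can be read back recursively. A hedged sketch that only consumes the stream and counts words, without depending on the WikiWord API (the WikiWordReaderSketch class is illustrative, not part of the SnipPad source):

import java.io.DataInputStream;
import java.io.IOException;

public class WikiWordReaderSketch {
    /** Reads one serialized word and returns the total number of words consumed, including children. */
    public static int readWikiWord(DataInputStream dataIn) throws IOException {
        String name = dataIn.readUTF();      // (String) name of the word
        String wikiText = dataIn.readUTF();  // (String) wiki text
        int childCount = dataIn.readInt();   // (int) number of child words, written with writeInt
        System.out.println("Read word '" + name + "' (" + wikiText.length() + " chars, " + childCount + " children)");
        int total = 1;
        for (int i = 0; i < childCount; i++) {
            total += readWikiWord(dataIn);
        }
        return total;
    }
}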
From source file: org.ozsoft.xantippe.filestore.FileStore.java
/**
 * Writes the index file.
 *
 * @throws IOException if the file could not be written
 */
private void writeIndexFile() throws IOException {
    File file = new File(dataDir, INDEX_FILE);
    DataOutputStream dos = new DataOutputStream(new FileOutputStream(file));
    dos.writeInt(entries.size());
    for (FileEntry entry : entries.values()) {
        dos.writeInt(entry.getId());
        dos.writeInt(entry.getOffset());
        dos.writeInt(entry.getLength());
    }
    dos.close();
}
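Reading the index is symmetrical: an entry count followed by an (id, offset, length) triple of ints per entry. A sketch under that assumption (the IndexFileReader class is illustrative; the real FileStore reads back into its own FileEntry objects, which are not shown here):

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class IndexFileReader {
    /** Reads the index written above: entry count, then (id, offset, length) per entry. */
    public static void readIndexFile(String path) throws IOException {
        try (DataInputStream dis = new DataInputStream(new FileInputStream(path))) {
            int count = dis.readInt();
            for (int i = 0; i < count; i++) {
                int id = dis.readInt();
                int offset = dis.readInt();
                int length = dis.readInt();
                System.out.printf("entry id=%d offset=%d length=%d%n", id, offset, length);
            }
        }
    }
}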
From source file: org.apache.hama.ipc.AsyncServer.java
/**
 * Setup response for the IPC Call.
 *
 * @param response buffer to serialize the response into
 * @param call {@link Call} to which we are setting up the response
 * @param status {@link Status} of the IPC call
 * @param rv return value for the IPC Call, if the call was successful
 * @param errorClass error class, if the call failed
 * @param error error message, if the call failed
 * @throws IOException
 */
private void setupResponse(ByteArrayOutputStream response, Call call, Status status, Writable rv,
        String errorClass, String error) throws IOException {
    response.reset();
    DataOutputStream out = new DataOutputStream(response);
    out.writeInt(call.id);       // write call id
    out.writeInt(status.state);  // write status
    if (status == Status.SUCCESS) {
        rv.write(out);
    } else {
        WritableUtils.writeString(out, errorClass);
        WritableUtils.writeString(out, error);
    }
    call.setResponse(ByteBuffer.wrap(response.toByteArray()));
    IOUtils.closeStream(out);
}
From source file: com.ning.arecibo.util.timeline.times.TimelineCoderImpl.java
private void writeTime(final int lastTime, final int newTime, final DataOutputStream dataStream)
        throws IOException {
    if (newTime > lastTime) {
        final int delta = (newTime - lastTime);
        if (delta <= TimelineOpcode.MAX_DELTA_TIME) {
            dataStream.writeByte(delta);
        } else {
            dataStream.writeByte(TimelineOpcode.FULL_TIME.getOpcodeIndex());
            dataStream.writeInt(newTime);
        }
    } else if (newTime == lastTime) {
        dataStream.writeByte(0);
    }
}
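Decoding mirrors this: read one unsigned byte; if it is the FULL_TIME opcode, the absolute time follows as a four-byte int written with writeInt, otherwise the byte itself is the delta (0 meaning the time repeated). A sketch in which the opcode constant is an assumed stand-in, since the real values live in TimelineOpcode and are not shown above:

import java.io.DataInputStream;
import java.io.IOException;

public class TimeDecoderSketch {
    // Assumed stand-in for TimelineOpcode.FULL_TIME.getOpcodeIndex(); the actual value is not shown here.
    private static final int FULL_TIME_OPCODE = 0xFF;

    /** Returns the decoded time, given the previously decoded time. */
    public static int readTime(int lastTime, DataInputStream in) throws IOException {
        int b = in.readUnsignedByte();
        if (b == FULL_TIME_OPCODE) {
            return in.readInt();   // absolute time written with writeInt(newTime)
        }
        return lastTime + b;       // small delta; 0 means the same time as before
    }
}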
From source file: com.joey.software.MoorFLSI.RepeatImageTextReader.java
public void saveData(File f) throws IOException {
    DataOutputStream out = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(f)));
    out.writeInt(imageData.size());
    out.writeInt(wide);
    out.writeInt(high);
    for (int i = 0; i < imageData.size(); i++) {
        System.out.println(i);
        out.writeLong(imageTime.get(i).getTime());
        for (int x = 0; x < wide; x++) {
            for (int y = 0; y < high; y++) {
                out.writeShort(imageData.get(i)[x][y]);
            }
        }
    }
    out.close();
}
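Loading the file back follows the same order: frame count, width, and height as ints, then one long timestamp and width * height shorts per frame. A hedged sketch (the RepeatImageReaderSketch class and its names are illustrative, not the class's own loader):

import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

public class RepeatImageReaderSketch {
    public static void loadData(File f) throws IOException {
        try (DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(f)))) {
            int frames = in.readInt();
            int wide = in.readInt();
            int high = in.readInt();
            for (int i = 0; i < frames; i++) {
                long time = in.readLong();   // written with writeLong(imageTime.get(i).getTime())
                short[][] frame = new short[wide][high];
                for (int x = 0; x < wide; x++) {
                    for (int y = 0; y < high; y++) {
                        frame[x][y] = in.readShort();
                    }
                }
                System.out.println("frame " + i + " @ " + time);
            }
        }
    }
}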
From source file: com.linkedin.pinot.core.common.datatable.DataTableImplV2.java
@Nonnull
@Override
public byte[] toBytes() throws IOException {
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    DataOutputStream dataOutputStream = new DataOutputStream(byteArrayOutputStream);
    dataOutputStream.writeInt(VERSION);
    dataOutputStream.writeInt(_numRows);
    dataOutputStream.writeInt(_numColumns);
    int dataOffset = HEADER_SIZE;

    // Write dictionary.
    dataOutputStream.writeInt(dataOffset);
    byte[] dictionaryMapBytes = null;
    if (_dictionaryMap != null) {
        dictionaryMapBytes = serializeDictionaryMap();
        dataOutputStream.writeInt(dictionaryMapBytes.length);
        dataOffset += dictionaryMapBytes.length;
    } else {
        dataOutputStream.writeInt(0);
    }

    // Write metadata.
    dataOutputStream.writeInt(dataOffset);
    byte[] metadataBytes = serializeMetadata();
    dataOutputStream.writeInt(metadataBytes.length);
    dataOffset += metadataBytes.length;

    // Write data schema.
    dataOutputStream.writeInt(dataOffset);
    byte[] dataSchemaBytes = null;
    if (_dataSchema != null) {
        dataSchemaBytes = _dataSchema.toBytes();
        dataOutputStream.writeInt(dataSchemaBytes.length);
        dataOffset += dataSchemaBytes.length;
    } else {
        dataOutputStream.writeInt(0);
    }

    // Write fixed size data.
    dataOutputStream.writeInt(dataOffset);
    if (_fixedSizeDataBytes != null) {
        dataOutputStream.writeInt(_fixedSizeDataBytes.length);
        dataOffset += _fixedSizeDataBytes.length;
    } else {
        dataOutputStream.writeInt(0);
    }

    // Write variable size data.
    dataOutputStream.writeInt(dataOffset);
    if (_variableSizeDataBytes != null) {
        dataOutputStream.writeInt(_variableSizeDataBytes.length);
    } else {
        dataOutputStream.writeInt(0);
    }

    // Write actual data.
    if (dictionaryMapBytes != null) {
        dataOutputStream.write(dictionaryMapBytes);
    }
    dataOutputStream.write(metadataBytes);
    if (dataSchemaBytes != null) {
        dataOutputStream.write(dataSchemaBytes);
    }
    if (_fixedSizeDataBytes != null) {
        dataOutputStream.write(_fixedSizeDataBytes);
    }
    if (_variableSizeDataBytes != null) {
        dataOutputStream.write(_variableSizeDataBytes);
    }
    return byteArrayOutputStream.toByteArray();
}
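The result is a fixed header, version, row count, and column count followed by an offset/length int pair per section, with the section payloads appended afterwards. A sketch of parsing just that header, assuming the same section order as the writer above; this is an illustration, not Pinot's DataTableImplV2 constructor:

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

public class DataTableHeaderSketch {
    public static void printHeader(byte[] bytes) throws IOException {
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes));
        System.out.println("version    = " + in.readInt());
        System.out.println("numRows    = " + in.readInt());
        System.out.println("numColumns = " + in.readInt());
        String[] sections = { "dictionaryMap", "metadata", "dataSchema", "fixedSizeData", "variableSizeData" };
        for (String section : sections) {
            int offset = in.readInt();   // absolute offset of the section within 'bytes'
            int length = in.readInt();   // 0 when the section is absent
            System.out.println(section + ": offset=" + offset + " length=" + length);
        }
    }
}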