List of usage examples for java.io DataOutput writeInt
void writeInt(int v) throws IOException;
Writes an int value, comprised of four bytes, to the output stream. The four bytes are written high-order byte first (big-endian), so the value can be read back with DataInput.readInt.
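A minimal round-trip sketch of this contract (a standalone snippet, not taken from any of the projects below): writeInt always emits exactly four bytes, and readInt on the other end recovers the same value.

import java.io.*;

public class WriteIntRoundTrip {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutput out = new DataOutputStream(bytes);
    out.writeInt(0xCAFEBABE); // always four bytes, high-order byte first

    byte[] raw = bytes.toByteArray(); // raw.length == 4
    DataInput in = new DataInputStream(new ByteArrayInputStream(raw));
    System.out.printf("%08X%n", in.readInt()); // prints CAFEBABE
  }
}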
From source file:org.apache.isis.objectstore.nosql.db.file.server.FileServer.java

private void startSyncing() {
  final String syncHost = config.getString("fileserver.sync-host", DEFAULT_HOST);
  final int syncPort = config.getInt("fileserver.sync-port", DEFAULT_SYNC_PORT);
  final int connectionTimeout = config.getInt("fileserver.connection.timeout", 5000);

  LOG.info("preparing to sync to secondary server on " + syncHost + " port " + syncPort);

  final InetAddress address;
  try {
    address = InetAddress.getByName(syncHost);
  } catch (final UnknownHostException e) {
    LOG.error("Unknown host " + syncHost, e);
    System.exit(0);
    return;
  }

  while (awaitConnections) {
    Socket socket = null;
    try {
      socket = new Socket(address, syncPort);
      LOG.info("sync connected to " + socket.getInetAddress().getHostAddress()
          + " port " + socket.getLocalPort());

      final CRC32 crc32 = new CRC32();
      final DataOutput output = new DataOutputStream(
          new CheckedOutputStream(socket.getOutputStream(), crc32));
      final DataInput input = new DataInputStream(socket.getInputStream());
      output.writeByte(INIT);
      long logId = input.readLong();
      do {
        final long nextLogId = logId + 1;
        final File file = Util.logFile(nextLogId);
        if (file.exists() && server.getLogger().isWritten(nextLogId)) {
          logId++;

          output.writeByte(RECOVERY_LOG);
          crc32.reset();
          output.writeLong(logId);

          LOG.info("sending recovery file: " + file.getName());
          final BufferedInputStream fileInput =
              new BufferedInputStream(new FileInputStream(file));

          final byte[] buffer = new byte[8092];
          int read;
          while ((read = fileInput.read(buffer)) > 0) {
            output.writeInt(read);
            output.write(buffer, 0, read);
          }
          output.writeInt(0);
          output.writeLong(crc32.getValue());
        }
        try {
          Thread.sleep(300);
        } catch (final InterruptedException ignore) {
        }

        while (isQuiescent) {
          try {
            Thread.sleep(300);
          } catch (final InterruptedException ignore) {
          }
        }
      } while (awaitConnections);
    } catch (final ConnectException e) {
      LOG.warn("not yet connected to secondary server at " + syncHost + " port " + syncPort);
      try {
        Thread.sleep(connectionTimeout);
      } catch (final InterruptedException ignore) {
      }
    } catch (final IOException e) {
      LOG.error("start failure - networking not set up for " + syncHost, e);
      try {
        Thread.sleep(300);
      } catch (final InterruptedException ignore) {
      }
    } catch (final RuntimeException e) {
      LOG.error("start failure", e);
      try {
        Thread.sleep(300);
      } catch (final InterruptedException ignore) {
      }
    }
  }
}
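The snippet frames each file chunk as writeInt(read) followed by the bytes, and terminates with writeInt(0); the CRC then follows as a trailer. (Incidentally, the 8092-byte buffer looks like a typo for 8192, though any size works because every chunk carries its own length.) A hedged sketch of the matching read loop — an illustrative helper, not part of the Isis file server:

import java.io.DataInput;
import java.io.IOException;
import java.io.OutputStream;

final class FramedChunks {
  // Consumes chunks framed as [int length][length bytes]..., ending at a zero
  // length, mirroring the writeInt(read)/writeInt(0) framing above.
  static void copy(DataInput in, OutputStream sink) throws IOException {
    byte[] buffer = new byte[8192];
    int length;
    while ((length = in.readInt()) > 0) {
      if (length > buffer.length) buffer = new byte[length];
      in.readFully(buffer, 0, length); // blocks until the whole chunk arrives
      sink.write(buffer, 0, length);
    }
  }
}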
From source file:org.apache.giraph.partition.DiskBackedOnlineComputePartitionStore.java
/**
 * Writes vertex edges (Id, Edges) to stream.
 *
 * @param output The output stream
 * @param vertex The vertex to serialize
 * @throws IOException
 */
@SuppressWarnings("unchecked")
private void writeOutEdges(DataOutput output, Vertex<I, V, E, M> vertex) throws IOException {
  vertex.getId().write(output);
  if (vertex.getEdges() == null) {
    output.writeInt(0); // zero edge count marks a vertex with no edges
    return;
  }
  ((OutEdges<I, E>) vertex.getEdges()).write(output);
}
From source file:org.apache.giraph.graph.BspServiceWorker.java
@Override
public void storeCheckpoint() throws IOException {
  getContext().setStatus("storeCheckpoint: Starting checkpoint "
      + getGraphMapper().getMapFunctions().toString()
      + " - Attempt=" + getApplicationAttempt()
      + ", Superstep=" + getSuperstep());

  // Algorithm:
  // For each partition, dump vertices and messages
  Path metadataFilePath = new Path(getCheckpointBasePath(getSuperstep()) + "."
      + getHostnamePartitionId() + CHECKPOINT_METADATA_POSTFIX);
  Path verticesFilePath = new Path(getCheckpointBasePath(getSuperstep()) + "."
      + getHostnamePartitionId() + CHECKPOINT_VERTICES_POSTFIX);
  Path validFilePath = new Path(getCheckpointBasePath(getSuperstep()) + "."
      + getHostnamePartitionId() + CHECKPOINT_VALID_POSTFIX);

  // Remove these files if they already exist (shouldn't though, unless
  // of previous failure of this worker)
  if (getFs().delete(validFilePath, false)) {
    LOG.warn("storeCheckpoint: Removed valid file " + validFilePath);
  }
  if (getFs().delete(metadataFilePath, false)) {
    LOG.warn("storeCheckpoint: Removed metadata file " + metadataFilePath);
  }
  if (getFs().delete(verticesFilePath, false)) {
    LOG.warn("storeCheckpoint: Removed file " + verticesFilePath);
  }

  FSDataOutputStream verticesOutputStream = getFs().create(verticesFilePath);
  ByteArrayOutputStream metadataByteStream = new ByteArrayOutputStream();
  DataOutput metadataOutput = new DataOutputStream(metadataByteStream);
  for (Partition<I, V, E, M> partition : workerPartitionMap.values()) {
    long startPos = verticesOutputStream.getPos();
    partition.write(verticesOutputStream);
    // Write the metadata for this partition
    // Format:
    // <index count>
    // <index 0 start pos><partition id>
    // <index 1 start pos><partition id>
    metadataOutput.writeLong(startPos);
    metadataOutput.writeInt(partition.getPartitionId());
    if (LOG.isDebugEnabled()) {
      LOG.debug("storeCheckpoint: Vertex file starting offset = " + startPos
          + ", length = " + (verticesOutputStream.getPos() - startPos)
          + ", partition = " + partition.toString());
    }
  }
  // Metadata is buffered and written at the end since it's small and
  // needs to know how many partitions this worker owns
  FSDataOutputStream metadataOutputStream = getFs().create(metadataFilePath);
  metadataOutputStream.writeInt(workerPartitionMap.size());
  metadataOutputStream.write(metadataByteStream.toByteArray());
  metadataOutputStream.close();
  verticesOutputStream.close();
  if (LOG.isInfoEnabled()) {
    LOG.info("storeCheckpoint: Finished metadata (" + metadataFilePath
        + ") and vertices (" + verticesFilePath + ").");
  }
  getFs().createNewFile(validFilePath);
}
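The two-pass shape here — buffer the per-partition metadata in memory, then prefix it with writeInt(count) — generalizes to any record stream whose count is unknown up front. A minimal sketch using only java.io, with a record layout chosen to echo the long offset / int id pairs above:

import java.io.*;
import java.util.List;

final class CountPrefixedRecords {
  // Serializes [int count][long offset, int id]... by buffering the body
  // first, since the count is only known after the loop.
  static byte[] serialize(List<long[]> records) throws IOException {
    ByteArrayOutputStream body = new ByteArrayOutputStream();
    DataOutputStream bodyOut = new DataOutputStream(body);
    for (long[] r : records) {
      bodyOut.writeLong(r[0]); // e.g. start offset
      bodyOut.writeInt((int) r[1]); // e.g. partition id
    }
    ByteArrayOutputStream whole = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(whole);
    out.writeInt(records.size()); // count goes first
    out.write(body.toByteArray());
    return whole.toByteArray();
  }
}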
From source file:org.apache.hadoop.hbase.KeyValue.java
/**
 * Write out a KeyValue in the manner in which we used to when KeyValue was a Writable.
 * @param kv
 * @param out
 * @return Length written on stream
 * @throws IOException
 * @see #create(DataInput) for the inverse function
 */
public static long write(final KeyValue kv, final DataOutput out) throws IOException {
  // This is how the old Writables write used to serialize KVs. Need to figure
  // way to make it work for all implementations.
  int length = kv.getLength();
  out.writeInt(length);
  out.write(kv.getBuffer(), kv.getOffset(), length);
  return length + Bytes.SIZEOF_INT;
}
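The 4 extra bytes in the return value are the writeInt length prefix itself (Bytes.SIZEOF_INT is 4). The inverse is a readInt followed by readFully; a sketch of that general shape — HBase's actual create(DataInput) additionally rebuilds the KeyValue around the bytes:

import java.io.DataInput;
import java.io.IOException;

final class LengthPrefixed {
  // Reads a blob written as [int length][length bytes], the framing used by
  // write() above.
  static byte[] read(DataInput in) throws IOException {
    int length = in.readInt();
    byte[] bytes = new byte[length];
    in.readFully(bytes);
    return bytes;
  }
}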
From source file:org.apache.pig.data.SchemaTuple.java
protected void write(DataOutput out, boolean writeIdentifiers) throws IOException {
  if (writeIdentifiers) {
    int id = getSchemaTupleIdentifier();
    if (id < BinInterSedes.UNSIGNED_BYTE_MAX) {
      out.writeByte(BinInterSedes.SCHEMA_TUPLE_BYTE_INDEX);
      out.writeByte(id);
    } else if (id < BinInterSedes.UNSIGNED_SHORT_MAX) {
      out.writeByte(BinInterSedes.SCHEMA_TUPLE_SHORT_INDEX);
      out.writeShort(id);
    } else {
      out.writeByte(BinInterSedes.SCHEMA_TUPLE);
      out.writeInt(id);
    }
  }
  writeElements(out);
}
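The id is written in one, two, or four bytes depending on its magnitude, with a marker byte saying which form follows. The matching decoder dispatches on that marker; a hedged sketch, assuming the BinInterSedes markers are accessible (they are referenced the same way above):

import java.io.DataInput;
import java.io.IOException;
import org.apache.pig.data.BinInterSedes;

final class SchemaTupleIdReader {
  // Illustrative inverse of the id encoding above. Ids are non-negative,
  // so the narrow forms read back unsigned.
  static int readId(DataInput in) throws IOException {
    byte marker = in.readByte();
    switch (marker) {
    case BinInterSedes.SCHEMA_TUPLE_BYTE_INDEX: return in.readUnsignedByte();
    case BinInterSedes.SCHEMA_TUPLE_SHORT_INDEX: return in.readUnsignedShort();
    case BinInterSedes.SCHEMA_TUPLE: return in.readInt();
    default: throw new IOException("Unexpected marker: " + marker);
    }
  }
}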
From source file:edu.umn.cs.spatialHadoop.core.RTree.java
/**
 * Builds the RTree given a serialized list of elements. It uses the given
 * stockObject to deserialize these elements using
 * {@link TextSerializable#fromText(Text)} and build the tree. Also writes the
 * created tree to the disk directly.
 *
 * @param element_bytes - serialization of all elements separated by new lines
 * @param offset - offset of the first byte to use in elements_bytes
 * @param len - number of bytes to use in elements_bytes
 * @param degree - Degree of the R-tree to build in terms of number of children
 *          per node
 * @param dataOut - output stream to write the result to.
 * @param fast_sort - setting this to <code>true</code> allows the method to
 *          run faster by materializing the offset of each element in the list
 *          which speeds up the comparison. However, this requires an
 *          additional 16 bytes per element. So, for each 1M elements, the
 *          method will require an additional 16 M bytes (approximately).
 */
public void bulkLoadWrite(final byte[] element_bytes, final int offset, final int len,
    final int degree, DataOutput dataOut, final boolean fast_sort) {
  try {
    // Count number of elements in the given text
    int i_start = offset;
    final Text line = new Text();
    while (i_start < offset + len) {
      int i_end = skipToEOL(element_bytes, i_start);
      // Extract the line without end of line character
      line.set(element_bytes, i_start, i_end - i_start - 1);
      stockObject.fromText(line);
      elementCount++;
      i_start = i_end;
    }
    LOG.info("Bulk loading an RTree with " + elementCount + " elements");

    // It turns out the findBestDegree returns the best degree when the whole
    // tree is loaded to memory when processed. However, as current algorithms
    // process the tree while it's on disk, a higher degree should be selected
    // such that a node fits one file block (assumed to be 4K).
    // final int degree = findBestDegree(bytesAvailable, elementCount);
    LOG.info("Writing an RTree with degree " + degree);

    int height = Math.max(1, (int) Math.ceil(Math.log(elementCount) / Math.log(degree)));
    int leafNodeCount = (int) Math.pow(degree, height - 1);
    if (elementCount < 2 * leafNodeCount && height > 1) {
      height--;
      leafNodeCount = (int) Math.pow(degree, height - 1);
    }
    int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
    int nonLeafNodeCount = nodeCount - leafNodeCount;

    // Keep track of the offset of each element in the text
    final int[] offsets = new int[elementCount];
    final double[] xs = fast_sort ? new double[elementCount] : null;
    final double[] ys = fast_sort ? new double[elementCount] : null;

    i_start = offset;
    line.clear();
    for (int i = 0; i < elementCount; i++) {
      offsets[i] = i_start;
      int i_end = skipToEOL(element_bytes, i_start);
      if (xs != null) {
        // Extract the line with end of line character
        line.set(element_bytes, i_start, i_end - i_start - 1);
        stockObject.fromText(line);
        // Sample center of the shape
        xs[i] = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
        ys[i] = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
      }
      i_start = i_end;
    }

    /** A struct to store information about a split */
    class SplitStruct extends Rectangle {
      /** Start and end index for this split */
      int index1, index2;
      /** Direction of this split */
      byte direction;
      /** Index of first element on disk */
      int offsetOfFirstElement;

      static final byte DIRECTION_X = 0;
      static final byte DIRECTION_Y = 1;

      SplitStruct(int index1, int index2, byte direction) {
        this.index1 = index1;
        this.index2 = index2;
        this.direction = direction;
      }

      @Override
      public void write(DataOutput out) throws IOException {
        out.writeInt(offsetOfFirstElement);
        super.write(out);
      }

      void partition(Queue<SplitStruct> toBePartitioned) {
        IndexedSortable sortableX;
        IndexedSortable sortableY;

        if (fast_sort) {
          // Use materialized xs[] and ys[] to do the comparisons
          sortableX = new IndexedSortable() {
            @Override
            public void swap(int i, int j) {
              // Swap xs
              double tempx = xs[i];
              xs[i] = xs[j];
              xs[j] = tempx;
              // Swap ys
              double tempY = ys[i];
              ys[i] = ys[j];
              ys[j] = tempY;
              // Swap id
              int tempid = offsets[i];
              offsets[i] = offsets[j];
              offsets[j] = tempid;
            }

            @Override
            public int compare(int i, int j) {
              if (xs[i] < xs[j]) return -1;
              if (xs[i] > xs[j]) return 1;
              return 0;
            }
          };

          sortableY = new IndexedSortable() {
            @Override
            public void swap(int i, int j) {
              // Swap xs
              double tempx = xs[i];
              xs[i] = xs[j];
              xs[j] = tempx;
              // Swap ys
              double tempY = ys[i];
              ys[i] = ys[j];
              ys[j] = tempY;
              // Swap id
              int tempid = offsets[i];
              offsets[i] = offsets[j];
              offsets[j] = tempid;
            }

            @Override
            public int compare(int i, int j) {
              if (ys[i] < ys[j]) return -1;
              if (ys[i] > ys[j]) return 1;
              return 0;
            }
          };
        } else {
          // No materialized xs and ys. Always deserialize objects to compare
          sortableX = new IndexedSortable() {
            @Override
            public void swap(int i, int j) {
              // Swap id
              int tempid = offsets[i];
              offsets[i] = offsets[j];
              offsets[j] = tempid;
            }

            @Override
            public int compare(int i, int j) {
              // Get end of line
              int eol = skipToEOL(element_bytes, offsets[i]);
              line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
              stockObject.fromText(line);
              double xi = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
              eol = skipToEOL(element_bytes, offsets[j]);
              line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
              stockObject.fromText(line);
              double xj = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
              if (xi < xj) return -1;
              if (xi > xj) return 1;
              return 0;
            }
          };

          sortableY = new IndexedSortable() {
            @Override
            public void swap(int i, int j) {
              // Swap id
              int tempid = offsets[i];
              offsets[i] = offsets[j];
              offsets[j] = tempid;
            }

            @Override
            public int compare(int i, int j) {
              int eol = skipToEOL(element_bytes, offsets[i]);
              line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
              stockObject.fromText(line);
              double yi = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
              eol = skipToEOL(element_bytes, offsets[j]);
              line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
              stockObject.fromText(line);
              double yj = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
              if (yi < yj) return -1;
              if (yi > yj) return 1;
              return 0;
            }
          };
        }

        final IndexedSorter sorter = new QuickSort();
        final IndexedSortable[] sortables = new IndexedSortable[2];
        sortables[SplitStruct.DIRECTION_X] = sortableX;
        sortables[SplitStruct.DIRECTION_Y] = sortableY;

        sorter.sort(sortables[direction], index1, index2);

        // Partition into maxEntries partitions (equally) and
        // create a SplitStruct for each partition
        int i1 = index1;
        for (int iSplit = 0; iSplit < degree; iSplit++) {
          int i2 = index1 + (index2 - index1) * (iSplit + 1) / degree;
          SplitStruct newSplit = new SplitStruct(i1, i2, (byte) (1 - direction));
          toBePartitioned.add(newSplit);
          i1 = i2;
        }
      }
    }

    // All nodes stored in level-order traversal
    Vector<SplitStruct> nodes = new Vector<SplitStruct>();
    final Queue<SplitStruct> toBePartitioned = new LinkedList<SplitStruct>();
    toBePartitioned.add(new SplitStruct(0, elementCount, SplitStruct.DIRECTION_X));

    while (!toBePartitioned.isEmpty()) {
      SplitStruct split = toBePartitioned.poll();
      if (nodes.size() < nonLeafNodeCount) {
        // This is a non-leaf
        split.partition(toBePartitioned);
      }
      nodes.add(split);
    }

    if (nodes.size() != nodeCount) {
      throw new RuntimeException(
          "Expected node count: " + nodeCount + ". Real node count: " + nodes.size());
    }

    // Now we have our data sorted in the required order. Start building
    // the tree.
    // Store the offset of each leaf node in the tree
    FSDataOutputStream fakeOut = null;
    try {
      fakeOut = new FSDataOutputStream(new java.io.OutputStream() {
        // Null output stream
        @Override
        public void write(int b) throws IOException {
          // Do nothing
        }

        @Override
        public void write(byte[] b, int off, int len) throws IOException {
          // Do nothing
        }

        @Override
        public void write(byte[] b) throws IOException {
          // Do nothing
        }
      }, null, TreeHeaderSize + nodes.size() * NodeSize);

      for (int i_leaf = nonLeafNodeCount, i = 0; i_leaf < nodes.size(); i_leaf++) {
        nodes.elementAt(i_leaf).offsetOfFirstElement = (int) fakeOut.getPos();
        if (i != nodes.elementAt(i_leaf).index1) throw new RuntimeException();
        double x1, y1, x2, y2;

        // Initialize MBR to first object
        int eol = skipToEOL(element_bytes, offsets[i]);
        fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
        line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
        stockObject.fromText(line);
        Rectangle mbr = stockObject.getMBR();
        x1 = mbr.x1;
        y1 = mbr.y1;
        x2 = mbr.x2;
        y2 = mbr.y2;
        i++;

        while (i < nodes.elementAt(i_leaf).index2) {
          eol = skipToEOL(element_bytes, offsets[i]);
          fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
          line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
          stockObject.fromText(line);
          mbr = stockObject.getMBR();
          if (mbr.x1 < x1) x1 = mbr.x1;
          if (mbr.y1 < y1) y1 = mbr.y1;
          if (mbr.x2 > x2) x2 = mbr.x2;
          if (mbr.y2 > y2) y2 = mbr.y2;
          i++;
        }
        nodes.elementAt(i_leaf).set(x1, y1, x2, y2);
      }
    } finally {
      if (fakeOut != null) fakeOut.close();
    }

    // Calculate MBR and offsetOfFirstElement for non-leaves
    for (int i_node = nonLeafNodeCount - 1; i_node >= 0; i_node--) {
      int i_first_child = i_node * degree + 1;
      nodes.elementAt(i_node).offsetOfFirstElement =
          nodes.elementAt(i_first_child).offsetOfFirstElement;
      int i_child = 0;
      Rectangle mbr;
      mbr = nodes.elementAt(i_first_child + i_child);
      double x1 = mbr.x1;
      double y1 = mbr.y1;
      double x2 = mbr.x2;
      double y2 = mbr.y2;
      i_child++;

      while (i_child < degree) {
        mbr = nodes.elementAt(i_first_child + i_child);
        if (mbr.x1 < x1) x1 = mbr.x1;
        if (mbr.y1 < y1) y1 = mbr.y1;
        if (mbr.x2 > x2) x2 = mbr.x2;
        if (mbr.y2 > y2) y2 = mbr.y2;
        i_child++;
      }
      nodes.elementAt(i_node).set(x1, y1, x2, y2);
    }

    // Start writing the tree
    // write tree header (including size)
    // Total tree size. (== Total bytes written - 8 bytes for the size itself)
    dataOut.writeInt(TreeHeaderSize + NodeSize * nodeCount + len);
    // Tree height
    dataOut.writeInt(height);
    // Degree
    dataOut.writeInt(degree);
    dataOut.writeInt(elementCount);

    // write nodes
    for (SplitStruct node : nodes) {
      node.write(dataOut);
    }
    // write elements
    for (int element_i = 0; element_i < elementCount; element_i++) {
      int eol = skipToEOL(element_bytes, offsets[element_i]);
      dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
    }
  } catch (IOException e) {
    e.printStackTrace();
  }
}
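The tree header is four plain writeInt fields in a fixed order, so reading it back is four readInt calls; a minimal sketch (the holder class and its field names are hypothetical):

import java.io.DataInput;
import java.io.IOException;

final class RTreeHeader {
  int treeSize, height, degree, elementCount; // hypothetical holder fields

  // Must mirror the four dataOut.writeInt(...) calls at the end of bulkLoadWrite.
  static RTreeHeader read(DataInput in) throws IOException {
    RTreeHeader h = new RTreeHeader();
    h.treeSize = in.readInt();
    h.height = in.readInt();
    h.degree = in.readInt();
    h.elementCount = in.readInt();
    return h;
  }
}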
From source file:org.apache.pig.data.BinInterSedes.java
@Override
@SuppressWarnings("unchecked")
public void writeDatum(DataOutput out, Object val, byte type) throws IOException {
  switch (type) {
  case DataType.TUPLE:
    writeTuple(out, (Tuple) val);
    break;
  case DataType.BAG:
    writeBag(out, (DataBag) val);
    break;
  case DataType.MAP: {
    writeMap(out, (Map<String, Object>) val);
    break;
  }
  case DataType.INTERNALMAP: {
    out.writeByte(INTERNALMAP);
    Map<Object, Object> m = (Map<Object, Object>) val;
    out.writeInt(m.size());
    Iterator<Map.Entry<Object, Object>> i = m.entrySet().iterator();
    while (i.hasNext()) {
      Map.Entry<Object, Object> entry = i.next();
      writeDatum(out, entry.getKey());
      writeDatum(out, entry.getValue());
    }
    break;
  }
  case DataType.INTEGER:
    int i = (Integer) val;
    if (i == 0) {
      out.writeByte(INTEGER_0);
    } else if (i == 1) {
      out.writeByte(INTEGER_1);
    } else if (Byte.MIN_VALUE <= i && i <= Byte.MAX_VALUE) {
      out.writeByte(INTEGER_INBYTE);
      out.writeByte(i);
    } else if (Short.MIN_VALUE <= i && i <= Short.MAX_VALUE) {
      out.writeByte(INTEGER_INSHORT);
      out.writeShort(i);
    } else {
      out.writeByte(INTEGER);
      out.writeInt(i);
    }
    break;
  case DataType.LONG:
    long lng = (Long) val;
    if (lng == 0) {
      out.writeByte(LONG_0);
    } else if (lng == 1) {
      out.writeByte(LONG_1);
    } else if (Byte.MIN_VALUE <= lng && lng <= Byte.MAX_VALUE) {
      out.writeByte(LONG_INBYTE);
      out.writeByte((int) lng);
    } else if (Short.MIN_VALUE <= lng && lng <= Short.MAX_VALUE) {
      out.writeByte(LONG_INSHORT);
      out.writeShort((int) lng);
    } else if (Integer.MIN_VALUE <= lng && lng <= Integer.MAX_VALUE) {
      out.writeByte(LONG_ININT);
      out.writeInt((int) lng);
    } else {
      out.writeByte(LONG);
      out.writeLong(lng);
    }
    break;
  case DataType.DATETIME:
    out.writeByte(DATETIME);
    out.writeLong(((DateTime) val).getMillis());
    out.writeShort(((DateTime) val).getZone().getOffset((DateTime) val) / ONE_MINUTE);
    break;
  case DataType.FLOAT:
    out.writeByte(FLOAT);
    out.writeFloat((Float) val);
    break;
  case DataType.BIGINTEGER:
    out.writeByte(BIGINTEGER);
    writeBigInteger(out, (BigInteger) val);
    break;
  case DataType.BIGDECIMAL:
    out.writeByte(BIGDECIMAL);
    writeBigDecimal(out, (BigDecimal) val);
    break;
  case DataType.DOUBLE:
    out.writeByte(DOUBLE);
    out.writeDouble((Double) val);
    break;
  case DataType.BOOLEAN:
    if ((Boolean) val)
      out.writeByte(BOOLEAN_TRUE);
    else
      out.writeByte(BOOLEAN_FALSE);
    break;
  case DataType.BYTE:
    out.writeByte(BYTE);
    out.writeByte((Byte) val);
    break;
  case DataType.BYTEARRAY: {
    DataByteArray bytes = (DataByteArray) val;
    SedesHelper.writeBytes(out, bytes.mData);
    break;
  }
  case DataType.CHARARRAY: {
    SedesHelper.writeChararray(out, (String) val);
    break;
  }
  case DataType.GENERIC_WRITABLECOMPARABLE:
    out.writeByte(GENERIC_WRITABLECOMPARABLE);
    // store the class name, so we know the class to create on read
    writeDatum(out, val.getClass().getName());
    Writable writable = (Writable) val;
    writable.write(out);
    break;
  case DataType.NULL:
    out.writeByte(NULL);
    break;
  default:
    throw new RuntimeException("Unexpected data type " + val.getClass().getName()
        + " found in stream. Note only standard Pig type is supported when you output from UDF/LoadFunc");
  }
}
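Note the contrast with the SchemaTuple id decoding earlier: these integers are signed, so the one- and two-byte forms read back sign-extended rather than unsigned. A hedged sketch of the inverse of the DataType.INTEGER branch, again assuming the BinInterSedes markers are accessible:

import java.io.DataInput;
import java.io.IOException;
import org.apache.pig.data.BinInterSedes;

final class CompactIntReader {
  // Illustrative inverse of the DataType.INTEGER branch above.
  static int readCompactInt(DataInput in) throws IOException {
    byte marker = in.readByte();
    switch (marker) {
    case BinInterSedes.INTEGER_0: return 0; // value encoded in the marker itself
    case BinInterSedes.INTEGER_1: return 1;
    case BinInterSedes.INTEGER_INBYTE: return in.readByte(); // sign-extended one-byte payload
    case BinInterSedes.INTEGER_INSHORT: return in.readShort(); // sign-extended two-byte payload
    case BinInterSedes.INTEGER: return in.readInt(); // full four bytes
    default: throw new IOException("Not an integer marker: " + marker);
    }
  }
}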
From source file:com.mobicage.rogerthat.plugins.messaging.BrandingMgr.java
@Override
public void writePickle(DataOutput out) throws IOException {
  T.dontCare();
  out.writeInt(mQueue.size());
  for (BrandedItem item : mQueue) {
    out.writeUTF(JSONValue.toJSONString(item.toJSONMap()));
  }
  out.writeInt(mDownloadMgrQueue.size());
  for (BrandedItem item : mDownloadMgrQueue.values()) {
    out.writeUTF(JSONValue.toJSONString(item.toJSONMap()));
  }
}
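Unpickling reverses the layout: readInt gives each queue's size, then exactly that many readUTF calls follow. A hypothetical read-side counterpart (the JSON-to-BrandedItem parsing is elided):

import java.io.DataInput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

final class PickleReader {
  // Reads one queue written as [int size][size UTF strings]; call it twice
  // to consume both queues written by writePickle above.
  static List<String> readStringList(DataInput in) throws IOException {
    int size = in.readInt();
    List<String> items = new ArrayList<String>(size);
    for (int i = 0; i < size; i++) {
      items.add(in.readUTF()); // each entry is a JSON-encoded BrandedItem
    }
    return items;
  }
}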
From source file:com.ebay.erl.mobius.core.model.Tuple.java
/**
 * Serialize this tuple to the output <code>out</code>.
 * <p>
 * When serializing, the values are stored in the order of the schema names'
 * ordering. See {@link #setSchema(String[])} for more explanation.
 */
@Override
public void write(DataOutput out) throws IOException {
  // write the number of columns of this tuple
  out.writeInt(this.values.size());

  if (this.values.size() != this.namesToIdxMapping.size()) {
    StringBuffer sb = new StringBuffer();
    for (Object v : values)
      sb.append(v.toString()).append(",");
    throw new IllegalArgumentException(this.getClass().getCanonicalName()
        + ", the length of values and schmea is not the same, "
        + "very likely the schema of this tuple has not been set yet, "
        + "please set it using Tuple#setSchema(String[])."
        + " Values:[" + sb.toString() + "] schema:" + this.namesToIdxMapping.keySet());
  }

  WriteImpl writeImpl = new WriteImpl(out);
  for (String aColumnName : getSorted(this.namesToIdxMapping.keySet())) {
    Object value = this.values.get(this.namesToIdxMapping.get(aColumnName));
    byte type = getType(value);
    out.write(type);
    writeImpl.setValue(value);
    writeImpl.handle(type);
  }
}
From source file:org.apache.sysml.runtime.compress.CompressedMatrixBlock.java
@Override
public void write(DataOutput out) throws IOException {
  out.writeBoolean(isCompressed());

  // serialize uncompressed block
  if (!isCompressed()) {
    super.write(out);
    return;
  }

  // serialize compressed matrix block
  out.writeInt(rlen);
  out.writeInt(clen);
  out.writeLong(nonZeros);
  out.writeInt(_colGroups.size());
  for (ColGroup grp : _colGroups) {
    out.writeByte(grp.getCompType().ordinal());
    grp.write(out); // delegate serialization
  }
}
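The read path has to consume the same fields in the same order; a schematic header reader (not SystemML's actual readFields, which goes on to reconstruct each ColGroup from its CompType tag):

import java.io.DataInput;
import java.io.IOException;

final class CompressedBlockHeader {
  // Reads the header written above:
  // [boolean compressed][int rlen][int clen][long nonZeros][int numColGroups]
  // Per-group payloads follow, each tagged with a CompType ordinal byte.
  static long[] readHeader(DataInput in) throws IOException {
    if (!in.readBoolean()) {
      throw new IOException("uncompressed block: body uses the plain MatrixBlock format");
    }
    long rlen = in.readInt(); // rows
    long clen = in.readInt(); // columns
    long nonZeros = in.readLong();
    long numGroups = in.readInt(); // column-group count
    return new long[] { rlen, clen, nonZeros, numGroups };
  }
}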