List of usage examples for java.io DataOutput writeInt

void writeInt(int v) throws IOException;

Writes an int value, which is comprised of four bytes, to the output stream.
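Before the real-world callers below, here is a minimal round-trip sketch: writeInt emits the four bytes of the value high byte first (big-endian), and the matching DataInput.readInt call recovers it. The class name WriteIntDemo and the sample value are illustrative only, not taken from any of the projects quoted below.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteIntDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buffer)) {
            out.writeInt(0x12345678); // written as the four bytes 12 34 56 78
        }
        byte[] bytes = buffer.toByteArray();
        System.out.println("bytes written: " + bytes.length); // exactly 4 per int

        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes))) {
            int value = in.readInt(); // reads the same four bytes back
            System.out.println("value read: 0x" + Integer.toHexString(value));
        }
    }
}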
From source file:edu.umn.cs.spatialHadoop.indexing.RTree.java

/**
 * Builds the RTree given a serialized list of elements. It uses the given
 * stockObject to deserialize these elements using
 * {@link TextSerializable#fromText(Text)} and build the tree. Also writes the
 * created tree to the disk directly.
 *
 * @param element_bytes
 *          - serialization of all elements separated by new lines
 * @param offset
 *          - offset of the first byte to use in elements_bytes
 * @param len
 *          - number of bytes to use in elements_bytes
 * @param degree
 *          - Degree of the R-tree to build in terms of number of children per
 *            node
 * @param dataOut
 *          - output stream to write the result to.
 * @param fast_sort
 *          - setting this to <code>true</code> allows the method to run
 *            faster by materializing the offset of each element in the list
 *            which speeds up the comparison. However, this requires an
 *            additional 16 bytes per element. So, for each 1M elements, the
 *            method will require an additional 16 M bytes (approximately).
 */
public static void bulkLoadWrite(final byte[] element_bytes, final int offset, final int len,
    final int degree, DataOutput dataOut, final Shape stockObject, final boolean fast_sort) {
  try {
    int elementCount = 0;
    // Count number of elements in the given text
    int i_start = offset;
    final Text line = new Text();
    while (i_start < offset + len) {
      int i_end = skipToEOL(element_bytes, i_start);
      // Extract the line without end of line character
      line.set(element_bytes, i_start, i_end - i_start - 1);
      stockObject.fromText(line);
      elementCount++;
      i_start = i_end;
    }
    LOG.info("Bulk loading an RTree with " + elementCount + " elements");

    // It turns out the findBestDegree returns the best degree when the whole
    // tree is loaded to memory when processed. However, as current algorithms
    // process the tree while it's on disk, a higher degree should be selected
    // such that a node fits one file block (assumed to be 4K).
    //final int degree = findBestDegree(bytesAvailable, elementCount);

    int height = Math.max(1, (int) Math.ceil(Math.log(elementCount) / Math.log(degree)));
    int leafNodeCount = (int) Math.pow(degree, height - 1);
    if (elementCount < 2 * leafNodeCount && height > 1) {
      height--;
      leafNodeCount = (int) Math.pow(degree, height - 1);
    }
    int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
    int nonLeafNodeCount = nodeCount - leafNodeCount;

    // Keep track of the offset of each element in the text
    final int[] offsets = new int[elementCount];
    final double[] xs = fast_sort ? new double[elementCount] : null;
    final double[] ys = fast_sort ? new double[elementCount] : null;

    i_start = offset;
    line.clear();
    for (int i = 0; i < elementCount; i++) {
      offsets[i] = i_start;
      int i_end = skipToEOL(element_bytes, i_start);
      if (xs != null) {
        // Extract the line with end of line character
        line.set(element_bytes, i_start, i_end - i_start - 1);
        stockObject.fromText(line);
        // Sample center of the shape
        xs[i] = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
        ys[i] = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
      }
      i_start = i_end;
    }

    /** A struct to store information about a split */
    class SplitStruct extends Rectangle {
      /** Start and end index for this split */
      int index1, index2;
      /** Direction of this split */
      byte direction;
      /** Index of first element on disk */
      int offsetOfFirstElement;

      static final byte DIRECTION_X = 0;
      static final byte DIRECTION_Y = 1;

      SplitStruct(int index1, int index2, byte direction) {
        this.index1 = index1;
        this.index2 = index2;
        this.direction = direction;
      }

      @Override
      public void write(DataOutput out) throws IOException {
        out.writeInt(offsetOfFirstElement);
        super.write(out);
      }

      void partition(Queue<SplitStruct> toBePartitioned) {
        IndexedSortable sortableX;
        IndexedSortable sortableY;

        if (fast_sort) {
          // Use materialized xs[] and ys[] to do the comparisons
          sortableX = new IndexedSortable() {
            @Override
            public void swap(int i, int j) {
              // Swap xs
              double tempx = xs[i];
              xs[i] = xs[j];
              xs[j] = tempx;
              // Swap ys
              double tempY = ys[i];
              ys[i] = ys[j];
              ys[j] = tempY;
              // Swap id
              int tempid = offsets[i];
              offsets[i] = offsets[j];
              offsets[j] = tempid;
            }

            @Override
            public int compare(int i, int j) {
              if (xs[i] < xs[j]) return -1;
              if (xs[i] > xs[j]) return 1;
              return 0;
            }
          };

          sortableY = new IndexedSortable() {
            @Override
            public void swap(int i, int j) {
              // Swap xs
              double tempx = xs[i];
              xs[i] = xs[j];
              xs[j] = tempx;
              // Swap ys
              double tempY = ys[i];
              ys[i] = ys[j];
              ys[j] = tempY;
              // Swap id
              int tempid = offsets[i];
              offsets[i] = offsets[j];
              offsets[j] = tempid;
            }

            @Override
            public int compare(int i, int j) {
              if (ys[i] < ys[j]) return -1;
              if (ys[i] > ys[j]) return 1;
              return 0;
            }
          };
        } else {
          // No materialized xs and ys. Always deserialize objects to compare
          sortableX = new IndexedSortable() {
            @Override
            public void swap(int i, int j) {
              // Swap id
              int tempid = offsets[i];
              offsets[i] = offsets[j];
              offsets[j] = tempid;
            }

            @Override
            public int compare(int i, int j) {
              // Get end of line
              int eol = skipToEOL(element_bytes, offsets[i]);
              line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
              stockObject.fromText(line);
              double xi = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
              eol = skipToEOL(element_bytes, offsets[j]);
              line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
              stockObject.fromText(line);
              double xj = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
              if (xi < xj) return -1;
              if (xi > xj) return 1;
              return 0;
            }
          };

          sortableY = new IndexedSortable() {
            @Override
            public void swap(int i, int j) {
              // Swap id
              int tempid = offsets[i];
              offsets[i] = offsets[j];
              offsets[j] = tempid;
            }

            @Override
            public int compare(int i, int j) {
              int eol = skipToEOL(element_bytes, offsets[i]);
              line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
              stockObject.fromText(line);
              double yi = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
              eol = skipToEOL(element_bytes, offsets[j]);
              line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
              stockObject.fromText(line);
              double yj = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
              if (yi < yj) return -1;
              if (yi > yj) return 1;
              return 0;
            }
          };
        }

        final IndexedSorter sorter = new QuickSort();
        final IndexedSortable[] sortables = new IndexedSortable[2];
        sortables[SplitStruct.DIRECTION_X] = sortableX;
        sortables[SplitStruct.DIRECTION_Y] = sortableY;

        sorter.sort(sortables[direction], index1, index2);

        // Partition into maxEntries partitions (equally) and
        // create a SplitStruct for each partition
        int i1 = index1;
        for (int iSplit = 0; iSplit < degree; iSplit++) {
          int i2 = index1 + (index2 - index1) * (iSplit + 1) / degree;
          SplitStruct newSplit = new SplitStruct(i1, i2, (byte) (1 - direction));
          toBePartitioned.add(newSplit);
          i1 = i2;
        }
      }
    }

    // All nodes stored in level-order traversal
    Vector<SplitStruct> nodes = new Vector<SplitStruct>();
    final Queue<SplitStruct> toBePartitioned = new LinkedList<SplitStruct>();
    toBePartitioned.add(new SplitStruct(0, elementCount, SplitStruct.DIRECTION_X));

    while (!toBePartitioned.isEmpty()) {
      SplitStruct split = toBePartitioned.poll();
      if (nodes.size() < nonLeafNodeCount) {
        // This is a non-leaf
        split.partition(toBePartitioned);
      }
      nodes.add(split);
    }

    if (nodes.size() != nodeCount) {
      throw new RuntimeException(
          "Expected node count: " + nodeCount + ". Real node count: " + nodes.size());
    }

    // Now we have our data sorted in the required order. Start building
    // the tree.
    // Store the offset of each leaf node in the tree
    FSDataOutputStream fakeOut = null;
    try {
      fakeOut = new FSDataOutputStream(new java.io.OutputStream() {
        // Null output stream
        @Override
        public void write(int b) throws IOException {
          // Do nothing
        }

        @Override
        public void write(byte[] b, int off, int len) throws IOException {
          // Do nothing
        }

        @Override
        public void write(byte[] b) throws IOException {
          // Do nothing
        }
      }, null, TreeHeaderSize + nodes.size() * NodeSize);

      for (int i_leaf = nonLeafNodeCount, i = 0; i_leaf < nodes.size(); i_leaf++) {
        nodes.elementAt(i_leaf).offsetOfFirstElement = (int) fakeOut.getPos();
        if (i != nodes.elementAt(i_leaf).index1)
          throw new RuntimeException();
        double x1, y1, x2, y2;

        // Initialize MBR to first object
        int eol = skipToEOL(element_bytes, offsets[i]);
        fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
        line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
        stockObject.fromText(line);
        Rectangle mbr = stockObject.getMBR();
        x1 = mbr.x1;
        y1 = mbr.y1;
        x2 = mbr.x2;
        y2 = mbr.y2;
        i++;

        while (i < nodes.elementAt(i_leaf).index2) {
          eol = skipToEOL(element_bytes, offsets[i]);
          fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
          line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
          stockObject.fromText(line);
          mbr = stockObject.getMBR();
          if (mbr.x1 < x1) x1 = mbr.x1;
          if (mbr.y1 < y1) y1 = mbr.y1;
          if (mbr.x2 > x2) x2 = mbr.x2;
          if (mbr.y2 > y2) y2 = mbr.y2;
          i++;
        }
        nodes.elementAt(i_leaf).set(x1, y1, x2, y2);
      }
    } finally {
      if (fakeOut != null)
        fakeOut.close();
    }

    // Calculate MBR and offsetOfFirstElement for non-leaves
    for (int i_node = nonLeafNodeCount - 1; i_node >= 0; i_node--) {
      int i_first_child = i_node * degree + 1;
      nodes.elementAt(i_node).offsetOfFirstElement = nodes.elementAt(i_first_child).offsetOfFirstElement;
      int i_child = 0;
      Rectangle mbr;
      mbr = nodes.elementAt(i_first_child + i_child);
      double x1 = mbr.x1;
      double y1 = mbr.y1;
      double x2 = mbr.x2;
      double y2 = mbr.y2;
      i_child++;

      while (i_child < degree) {
        mbr = nodes.elementAt(i_first_child + i_child);
        if (mbr.x1 < x1) x1 = mbr.x1;
        if (mbr.y1 < y1) y1 = mbr.y1;
        if (mbr.x2 > x2) x2 = mbr.x2;
        if (mbr.y2 > y2) y2 = mbr.y2;
        i_child++;
      }
      nodes.elementAt(i_node).set(x1, y1, x2, y2);
    }

    // Start writing the tree
    // write tree header (including size)
    // Total tree size. (== Total bytes written - 8 bytes for the size itself)
    dataOut.writeInt(TreeHeaderSize + NodeSize * nodeCount + len);
    // Tree height
    dataOut.writeInt(height);
    // Degree
    dataOut.writeInt(degree);
    dataOut.writeInt(elementCount);

    // write nodes
    for (SplitStruct node : nodes) {
      node.write(dataOut);
    }
    // write elements
    for (int element_i = 0; element_i < elementCount; element_i++) {
      int eol = skipToEOL(element_bytes, offsets[element_i]);
      dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
    }
  } catch (IOException e) {
    e.printStackTrace();
  }
}
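The tree header above is nothing more than four consecutive writeInt calls: total size, height, degree, and element count. A reader positioned at the first byte of such a tree could recover the header with the mirror-image DataInput calls; the helper below is only a sketch derived from the write sequence shown and is not part of the SpatialHadoop API.

import java.io.DataInput;
import java.io.IOException;

// Hypothetical helper: reads back the four header ints that bulkLoadWrite
// emits before the serialized nodes and elements.
final class RTreeHeader {
    final int treeSize, height, degree, elementCount;

    RTreeHeader(DataInput in) throws IOException {
        this.treeSize = in.readInt();     // TreeHeaderSize + NodeSize * nodeCount + len
        this.height = in.readInt();       // tree height
        this.degree = in.readInt();       // children per node
        this.elementCount = in.readInt(); // number of indexed elements
    }
}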
From source file:org.apache.cassandra.db.compaction.LazilyCompactedRow.java
public void write(DataOutput out) throws IOException {
  DataOutputBuffer clockOut = new DataOutputBuffer();
  ColumnFamily.serializer().serializeCFInfo(emptyColumnFamily, clockOut);

  long dataSize = headerBuffer.getLength() + clockOut.getLength() + columnSerializedSize;
  if (logger.isDebugEnabled())
    logger.debug(String.format("header / clock / column sizes are %s / %s / %s",
        headerBuffer.getLength(), clockOut.getLength(), columnSerializedSize));
  assert dataSize > 0;
  out.writeLong(dataSize);
  out.write(headerBuffer.getData(), 0, headerBuffer.getLength());
  out.write(clockOut.getData(), 0, clockOut.getLength());
  out.writeInt(columnCount);

  Iterator<IColumn> iter = iterator();
  while (iter.hasNext()) {
    IColumn column = iter.next();
    emptyColumnFamily.getColumnSerializer().serialize(column, out);
  }
  long secondPassColumnSize = reducer == null ? 0 : reducer.serializedSize;
  assert secondPassColumnSize == columnSerializedSize : "originally calculated column size of "
      + columnSerializedSize + " but now it is " + secondPassColumnSize;
}
From source file:org.apache.hadoop.hdfs.server.namenode.FSImageSerialization.java
/**
 * Serialize a {@link INodeFile} node
 * @param file The node to write
 * @param out The {@link DataOutputStream} where the fields are written
 * @param writeUnderConstruction Whether to write under-construction information
 */
public static void writeINodeFile(INodeFile file, DataOutput out, boolean writeUnderConstruction)
    throws IOException {
  LOG.info("== Inside writeINodeFile() ==");
  LOG.info("Parameters count and cached logged into FSImage");
  LOG.info("ACCESSCNT " + file.getAccessCount());
  LOG.info("CACHED " + file.getIsCached());

  writeLocalName(file, out);
  out.writeLong(file.getId());
  out.writeShort(file.getFileReplication());
  out.writeLong(file.getModificationTime());
  out.writeLong(file.getAccessTime());
  out.writeLong(file.getPreferredBlockSize());

  writeBlocks(file.getBlocks(), out);
  SnapshotFSImageFormat.saveFileDiffList(file, out);

  if (writeUnderConstruction) {
    if (file instanceof INodeFileUnderConstruction) {
      out.writeBoolean(true);
      final INodeFileUnderConstruction uc = (INodeFileUnderConstruction) file;
      writeString(uc.getClientName(), out);
      writeString(uc.getClientMachine(), out);
    } else {
      out.writeBoolean(false);
    }
  }

  writePermissionStatus(file, out);
  out.writeLong(file.getAccessCount());
  out.writeInt(file.getIsCached());
}
From source file:com.fiorano.openesb.application.aps.InPortInst.java
/**
 * This method is called to write this object of <code>InPortInst</code> to
 * the specified output stream object.
 *
 * @param out DataOutput object
 * @param versionNo Description of the Parameter
 * @exception IOException if an error occurs while converting data and
 *            writing it to a binary stream.
 * @since Tifosi2.0
 */
public void toStream(DataOutput out, int versionNo) throws IOException {
  super.toStream(out, versionNo);

  writeUTF(out, m_strPortName);
  writeUTF(out, m_strDscription);
  writeUTF(out, m_strXSD);
  writeUTF(out, m_strJavaClass);

  out.writeBoolean(m_bIsSyncRequestType);
  out.writeBoolean(m_bisDisabled);

  if (m_params != null && m_params.size() > 0) {
    int num = m_params.size();
    out.writeInt(num);
    for (int i = 0; i < num; ++i) {
      Param param = (Param) m_params.elementAt(i);
      param.toStream(out, versionNo);
    }
  } else {
    out.writeInt(0);
  }
}
From source file:org.apache.hama.ml.ann.NeuralNetwork.java
@Override
public void write(DataOutput output) throws IOException {
  // write model type
  WritableUtils.writeString(output, modelType);
  // write learning rate
  output.writeDouble(learningRate);
  // write model path
  if (this.modelPath != null) {
    WritableUtils.writeString(output, modelPath);
  } else {
    WritableUtils.writeString(output, "null");
  }

  // serialize the class
  Class<? extends FeatureTransformer> featureTransformerCls = this.featureTransformer.getClass();
  byte[] featureTransformerBytes = SerializationUtils.serialize(featureTransformerCls);
  output.writeInt(featureTransformerBytes.length);
  output.write(featureTransformerBytes);
}
From source file:org.apache.horn.core.AbstractNeuralNetwork.java
@Override
public void write(DataOutput output) throws IOException {
  // write model type
  WritableUtils.writeString(output, modelType);
  // write learning rate
  output.writeFloat(learningRate);
  // write model path
  if (this.modelPath != null) {
    WritableUtils.writeString(output, modelPath);
  } else {
    WritableUtils.writeString(output, "null");
  }

  // serialize the class
  Class<? extends FloatFeatureTransformer> featureTransformerCls = this.featureTransformer.getClass();
  byte[] featureTransformerBytes = SerializationUtils.serialize(featureTransformerCls);
  output.writeInt(featureTransformerBytes.length);
  output.write(featureTransformerBytes);
}
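Both neural-network examples above use the same length-prefix idiom for the serialized transformer class: writeInt(bytes.length) followed by write(bytes). A matching reader has to consume the length first and then exactly that many bytes, typically with readFully. The helper below is illustrative only and is not taken from Hama or Horn.

import java.io.DataInput;
import java.io.IOException;

// Illustrative helper: reads a byte array that was written as
// out.writeInt(bytes.length) followed by out.write(bytes).
final class LengthPrefixed {
    static byte[] read(DataInput in) throws IOException {
        int length = in.readInt();   // the length prefix written with writeInt
        byte[] bytes = new byte[length];
        in.readFully(bytes);         // consume exactly 'length' bytes
        return bytes;
    }
}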
From source file:dk.statsbiblioteket.util.LineReaderTest.java
public void writeSample(DataOutput out) throws Exception {
  out.writeInt(12345);
  out.writeInt(-87);
  out.writeLong(123456789L);
  out.write("Hello World!\n".getBytes("utf-8"));
  out.write("Another world\n".getBytes("utf-8"));
  out.writeFloat(0.5f);
  out.writeBoolean(true);
  out.writeBoolean(false);
  out.writeByte(12);
  out.writeByte(-12);
  out.write(129);
  out.writeShort(-4567);
  out.writeBytes("ASCII");
}
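For a mixed sequence like writeSample, a reader must issue the mirror-image DataInput calls in the same order and with matching widths. The sketch below is a plausible counterpart, not part of LineReaderTest; the method name and the fixed byte-array sizes for the two UTF-8 lines are assumptions based on the literals written above.

import java.io.DataInput;
import java.io.IOException;

// Hypothetical counterpart to writeSample above.
final class ReadSample {
    static void readSample(DataInput in) throws IOException {
        int a = in.readInt();                  // 12345
        int b = in.readInt();                  // -87
        long c = in.readLong();                // 123456789L
        byte[] hello = new byte[13];
        in.readFully(hello);                   // "Hello World!\n" as UTF-8 bytes
        byte[] another = new byte[14];
        in.readFully(another);                 // "Another world\n" as UTF-8 bytes
        float f = in.readFloat();              // 0.5f
        boolean b1 = in.readBoolean();         // true
        boolean b2 = in.readBoolean();         // false
        byte byte1 = in.readByte();            // 12
        byte byte2 = in.readByte();            // -12
        int unsigned = in.readUnsignedByte();  // 129 (write(int) stores the low-order byte)
        short s = in.readShort();              // -4567
        byte[] ascii = new byte[5];
        in.readFully(ascii);                   // "ASCII" written with writeBytes
    }
}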
From source file:org.apache.hama.graph.Vertex.java
@Override
public void write(DataOutput out) throws IOException {
  if (vertexID == null) {
    out.writeBoolean(false);
  } else {
    out.writeBoolean(true);
    vertexID.write(out);
  }

  if (value == null) {
    out.writeBoolean(false);
  } else {
    out.writeBoolean(true);
    value.write(out);
  }

  if (this.edges == null) {
    out.writeBoolean(false);
  } else {
    out.writeBoolean(true);
    out.writeInt(this.edges.size());
    for (Edge<V, E> edge : this.edges) {
      edge.getDestinationVertexID().write(out);
      if (edge.getValue() == null) {
        out.writeBoolean(false);
      } else {
        out.writeBoolean(true);
        edge.getValue().write(out);
      }
    }
  }

  out.writeBoolean(votedToHalt);
  writeState(out);
}
From source file:com.chinamobile.bcbsp.bspcontroller.Counters.java
@Override
public synchronized void write(DataOutput out) throws IOException {
  out.writeInt(counters.size());
  for (Group group : counters.values()) {
    Text.writeString(out, group.getName());
    group.write(out);
  }
}
From source file:org.apache.hadoop.hbase.security.access.HbaseObjectWritableFor96Migration.java
/**
 * Write a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding.
 * @param out
 * @param instance
 * @param declaredClass
 * @param conf
 * @throws IOException
 */
@SuppressWarnings("unchecked")
static void writeObject(DataOutput out, Object instance, Class declaredClass, Configuration conf)
    throws IOException {

  Object instanceObj = instance;
  Class declClass = declaredClass;

  if (instanceObj == null) { // null
    instanceObj = new NullInstance(declClass, conf);
    declClass = Writable.class;
  }
  writeClassCode(out, declClass);
  if (declClass.isArray()) { // array
    // If bytearray, just dump it out -- avoid the recursion and
    // byte-at-a-time we were previously doing.
    if (declClass.equals(byte[].class)) {
      Bytes.writeByteArray(out, (byte[]) instanceObj);
    } else {
      // if it is a Generic array, write the element's type
      if (getClassCode(declaredClass) == GENERIC_ARRAY_CODE) {
        Class<?> componentType = declaredClass.getComponentType();
        writeClass(out, componentType);
      }
      int length = Array.getLength(instanceObj);
      out.writeInt(length);
      for (int i = 0; i < length; i++) {
        Object item = Array.get(instanceObj, i);
        writeObject(out, item, item.getClass(), conf);
      }
    }
  } else if (List.class.isAssignableFrom(declClass)) {
    List list = (List) instanceObj;
    int length = list.size();
    out.writeInt(length);
    for (int i = 0; i < length; i++) {
      Object elem = list.get(i);
      writeObject(out, elem, elem == null ? Writable.class : elem.getClass(), conf);
    }
  } else if (declClass == String.class) { // String
    Text.writeString(out, (String) instanceObj);
  } else if (declClass.isPrimitive()) { // primitive type
    if (declClass == Boolean.TYPE) { // boolean
      out.writeBoolean(((Boolean) instanceObj).booleanValue());
    } else if (declClass == Character.TYPE) { // char
      out.writeChar(((Character) instanceObj).charValue());
    } else if (declClass == Byte.TYPE) { // byte
      out.writeByte(((Byte) instanceObj).byteValue());
    } else if (declClass == Short.TYPE) { // short
      out.writeShort(((Short) instanceObj).shortValue());
    } else if (declClass == Integer.TYPE) { // int
      out.writeInt(((Integer) instanceObj).intValue());
    } else if (declClass == Long.TYPE) { // long
      out.writeLong(((Long) instanceObj).longValue());
    } else if (declClass == Float.TYPE) { // float
      out.writeFloat(((Float) instanceObj).floatValue());
    } else if (declClass == Double.TYPE) { // double
      out.writeDouble(((Double) instanceObj).doubleValue());
    } else if (declClass == Void.TYPE) { // void
    } else {
      throw new IllegalArgumentException("Not a primitive: " + declClass);
    }
  } else if (declClass.isEnum()) { // enum
    Text.writeString(out, ((Enum) instanceObj).name());
  } else if (Message.class.isAssignableFrom(declaredClass)) {
    Text.writeString(out, instanceObj.getClass().getName());
    ((Message) instance).writeDelimitedTo(DataOutputOutputStream.constructOutputStream(out));
  } else if (Writable.class.isAssignableFrom(declClass)) { // Writable
    Class<?> c = instanceObj.getClass();
    Integer code = CLASS_TO_CODE.get(c);
    if (code == null) {
      out.writeByte(NOT_ENCODED);
      Text.writeString(out, c.getName());
    } else {
      writeClassCode(out, c);
    }
    ((Writable) instanceObj).write(out);
  } else if (Serializable.class.isAssignableFrom(declClass)) {
    Class<?> c = instanceObj.getClass();
    Integer code = CLASS_TO_CODE.get(c);
    if (code == null) {
      out.writeByte(NOT_ENCODED);
      Text.writeString(out, c.getName());
    } else {
      writeClassCode(out, c);
    }
    ByteArrayOutputStream bos = null;
    ObjectOutputStream oos = null;
    try {
      bos = new ByteArrayOutputStream();
      oos = new ObjectOutputStream(bos);
      oos.writeObject(instanceObj);
      byte[] value = bos.toByteArray();
      out.writeInt(value.length);
      out.write(value);
    } finally {
      if (bos != null) bos.close();
      if (oos != null) oos.close();
    }
  } else if (Scan.class.isAssignableFrom(declClass)) {
    Scan scan = (Scan) instanceObj;
    byte[] scanBytes = ProtobufUtil.toScan(scan).toByteArray();
    out.writeInt(scanBytes.length);
    out.write(scanBytes);
  } else {
    throw new IOException("Can't write: " + instanceObj + " as " + declClass);
  }
}