List of usage examples for java.io DataOutput write

void write(byte b[], int off, int len) throws IOException;

Writes len bytes from array b, in order, to the output stream.
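Before the collected sources, here is a minimal, self-contained sketch of the call (the class name WriteRangeExample and the sample payload are invented for illustration): only the five bytes starting at offset 7 of the backing array reach the stream.

import java.io.ByteArrayOutputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteRangeExample {
    public static void main(String[] args) throws IOException {
        byte[] payload = "hello, world".getBytes("UTF-8");
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        DataOutput out = new DataOutputStream(sink);

        // write only the 5 bytes starting at offset 7 ("world")
        out.write(payload, 7, 5);

        System.out.println(sink.toString("UTF-8")); // prints: world
    }
}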
From source file:org.apache.isis.objectstore.nosql.db.file.server.FileServer.java
private void startSyncing() {
    final String syncHost = config.getString("fileserver.sync-host", DEFAULT_HOST);
    final int syncPort = config.getInt("fileserver.sync-port", DEFAULT_SYNC_PORT);
    final int connectionTimeout = config.getInt("fileserver.connection.timeout", 5000);

    LOG.info("preparing to sync to secondary server on " + syncHost + " port " + syncPort);

    final InetAddress address;
    try {
        address = InetAddress.getByName(syncHost);
    } catch (final UnknownHostException e) {
        LOG.error("Unknown host " + syncHost, e);
        System.exit(0);
        return;
    }

    while (awaitConnections) {
        Socket socket = null;
        try {
            socket = new Socket(address, syncPort);
            LOG.info("sync connected to " + socket.getInetAddress().getHostAddress() + " port "
                    + socket.getLocalPort());

            final CRC32 crc32 = new CRC32();
            final DataOutput output = new DataOutputStream(
                    new CheckedOutputStream(socket.getOutputStream(), crc32));
            final DataInput input = new DataInputStream(socket.getInputStream());
            output.writeByte(INIT);
            long logId = input.readLong();
            do {
                final long nextLogId = logId + 1;
                final File file = Util.logFile(nextLogId);
                if (file.exists() && server.getLogger().isWritten(nextLogId)) {
                    logId++;

                    output.writeByte(RECOVERY_LOG);
                    crc32.reset();
                    output.writeLong(logId);

                    LOG.info("sending recovery file: " + file.getName());
                    final BufferedInputStream fileInput = new BufferedInputStream(new FileInputStream(file));

                    final byte[] buffer = new byte[8092];
                    int read;
                    while ((read = fileInput.read(buffer)) > 0) {
                        output.writeInt(read);
                        output.write(buffer, 0, read);
                    }
                    output.writeInt(0);
                    output.writeLong(crc32.getValue());
                }
                try {
                    Thread.sleep(300);
                } catch (final InterruptedException ignore) {
                }

                while (isQuiescent) {
                    try {
                        Thread.sleep(300);
                    } catch (final InterruptedException ignore) {
                    }
                }
            } while (awaitConnections);

        } catch (final ConnectException e) {
            LOG.warn("not yet connected to secondary server at " + syncHost + " port " + syncPort);
            try {
                Thread.sleep(connectionTimeout);
            } catch (final InterruptedException ignore) {
            }
        } catch (final IOException e) {
            LOG.error("start failure - networking not set up for " + syncHost, e);
            try {
                Thread.sleep(300);
            } catch (final InterruptedException ignore) {
            }
        } catch (final RuntimeException e) {
            LOG.error("start failure", e);
            try {
                Thread.sleep(300);
            } catch (final InterruptedException ignore) {
            }
        }
    }
}
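The writer above frames each recovery file as length-prefixed chunks (writeInt(read) followed by write(buffer, 0, read)), terminated by a zero-length chunk and followed by a CRC32 trailer computed through the CheckedOutputStream. A sketch of what the receiving side of that framing could look like, assuming the RECOVERY_LOG marker byte has already been consumed; the class and method names here are hypothetical, inferred from the writer loop rather than taken from FileServer:

import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.zip.CRC32;
import java.util.zip.CheckedInputStream;

class RecoveryLogReceiver {
    static void receiveLog(InputStream socketIn, OutputStream fileOut) throws IOException {
        final CRC32 crc32 = new CRC32();
        final DataInput in = new DataInputStream(new CheckedInputStream(socketIn, crc32));

        final long logId = in.readLong(); // covered by the checksum, like the writer's writeLong(logId)

        final byte[] buffer = new byte[8092]; // mirrors the writer's buffer size
        int length;
        while ((length = in.readInt()) > 0) { // a zero-length chunk terminates the file
            in.readFully(buffer, 0, length);
            fileOut.write(buffer, 0, length);
        }

        // capture the locally computed value before the trailer itself runs through the checked stream
        final long computed = crc32.getValue();
        final long expected = in.readLong();
        if (computed != expected) {
            throw new IOException("CRC mismatch for log " + logId + ": " + computed + " != " + expected);
        }
    }
}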
From source file:org.apache.marmotta.kiwi.io.KiWiIO.java
/**
 * Write a string to the data output. In case the string length exceeds LITERAL_COMPRESS_LENGTH,
 * uses a deflate-compressed format, otherwise writes the plain bytes.
 *
 * @param out     output destination to write to
 * @param content string to write
 * @throws IOException
 */
private static void writeContent(DataOutput out, String content) throws IOException {
    if (content.length() > LITERAL_COMPRESS_LENGTH) {
        // temporary buffer of the size of bytes in the content string
        // (assuming that the compressed data will fit into it)
        byte[] data = content.getBytes("UTF-8");
        byte[] buffer = new byte[data.length];

        Deflater compressor = new Deflater(Deflater.BEST_COMPRESSION, true);
        compressor.setInput(data);
        compressor.finish();

        int length = compressor.deflate(buffer);

        // only use compressed version if it is smaller than the number of bytes used by the string
        if (length < buffer.length) {
            log.debug("compressed string with {} bytes; compression ratio {}", data.length,
                    (double) length / data.length);

            out.writeByte(MODE_COMPRESSED);
            out.writeInt(data.length);
            out.writeInt(length);
            out.write(buffer, 0, length);
        } else {
            log.warn("compressed length exceeds string buffer: {} > {}", length, buffer.length);

            out.writeByte(MODE_DEFAULT);
            DataIO.writeString(out, content);
        }

        compressor.end();
    } else {
        out.writeByte(MODE_DEFAULT);
        DataIO.writeString(out, content);
    }
}
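A hypothetical counterpart that reads this format back, inferred from the writer rather than taken from KiWiIO (the name readContent, and the reuse of MODE_COMPRESSED, MODE_DEFAULT and DataIO.readString, are assumptions). Note the Inflater must be constructed with nowrap set to true to match the raw-deflate writer:

private static String readContent(DataInput in) throws IOException {
    byte mode = in.readByte();
    if (mode == MODE_COMPRESSED) {
        int uncompressedLength = in.readInt();
        int compressedLength = in.readInt();
        byte[] compressed = new byte[compressedLength];
        in.readFully(compressed);

        byte[] data = new byte[uncompressedLength];
        Inflater decompressor = new Inflater(true); // 'true' matches Deflater(..., true) above
        try {
            decompressor.setInput(compressed);
            decompressor.inflate(data);
        } catch (DataFormatException e) {
            throw new IOException("corrupt compressed literal", e);
        } finally {
            decompressor.end();
        }
        return new String(data, "UTF-8");
    } else {
        return DataIO.readString(in);
    }
}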
From source file:org.apache.vxquery.jsonparser.JSONParser.java
public void writeElement(ArrayBackedValueStorage abvs) throws IOException {
    tempABVS.reset();
    DataOutput out = tempABVS.getDataOutput();
    out.write(abvs.getByteArray(), abvs.getStartOffset(), abvs.getLength());
    FrameUtils.appendFieldToWriter(writer, appender, tempABVS.getByteArray(), tempABVS.getStartOffset(),
            tempABVS.getLength());
}
From source file:org.cloudata.core.common.io.CText.java
/**
 * Serialize: write this object to out.
 * The length uses zero-compressed encoding.
 *
 * @see CWritable#write(DataOutput)
 */
public void write(DataOutput out) throws IOException {
    CWritableUtils.writeVInt(out, length);
    out.write(bytes, 0, length);
}
From source file:org.cloudata.core.common.io.CText.java
/** Write a UTF8 encoded string to out. */
public static int writeString(DataOutput out, String s) throws IOException {
    ByteBuffer bytes = encode(s);
    int length = bytes.limit();
    CWritableUtils.writeVInt(out, length);
    out.write(bytes.array(), 0, length);
    return length;
}
From source file:org.cloudata.core.common.io.CWritableUtils.java
public static int writeCompressedByteArray(DataOutput out, byte[] bytes) throws IOException {
    if (bytes != null) {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        GZIPOutputStream gzout = new GZIPOutputStream(bos);
        gzout.write(bytes, 0, bytes.length);
        gzout.close();

        byte[] buffer = bos.toByteArray();
        int len = buffer.length;
        out.writeInt(len);
        out.write(buffer, 0, len);

        /* debug only! Once we have confidence, can lose this. */
        return ((bytes.length != 0) ? (100 * buffer.length) / bytes.length : 0);
    } else {
        out.writeInt(-1);
        return -1;
    }
}
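A hypothetical inverse, not part of the CWritableUtils listing above, showing how the length prefix and the -1 null marker would be consumed on the read side:

public static byte[] readCompressedByteArray(DataInput in) throws IOException {
    int len = in.readInt();
    if (len == -1) {
        return null; // matches the writeInt(-1) null marker above
    }
    byte[] buffer = new byte[len];
    in.readFully(buffer, 0, len);

    GZIPInputStream gzin = new GZIPInputStream(new ByteArrayInputStream(buffer));
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    byte[] chunk = new byte[4096];
    int read;
    while ((read = gzin.read(chunk)) > 0) {
        bos.write(chunk, 0, read);
    }
    gzin.close();
    return bos.toByteArray();
}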
From source file:org.cloudata.core.common.io.CWritableUtils.java
public static int writeString(DataOutput out, String s) throws IOException {
    if (s != null) {
        byte[] buffer = s.getBytes("UTF-8");
        int len = buffer.length;
        out.writeInt(len);
        out.write(buffer, 0, len);
        return CWritableUtils.getIntByteSize() + len;
    } else {
        out.writeInt(-1);
        return CWritableUtils.getIntByteSize();
    }
}
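The matching reader is a short inversion of the method above; a sketch (assumed, not from CWritableUtils):

public static String readString(DataInput in) throws IOException {
    int len = in.readInt();
    if (len == -1) {
        return null; // the writer encodes null as a -1 length
    }
    byte[] buffer = new byte[len];
    in.readFully(buffer, 0, len);
    return new String(buffer, "UTF-8");
}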
From source file:org.commoncrawl.service.listcrawler.HDFSFileIndex.java
public static void writeIndex(Vector<FingerprintAndOffsetTuple> offsetInfo, DataOutput indexFileOut)
        throws IOException {

    long firstFingerprint = offsetInfo.get(0)._fingerprint;

    BloomFilter bloomFilter = new BloomFilter(offsetInfo.size(), 0.001201);

    // sort the offset list by fingerprint
    Collections.sort(offsetInfo, new Comparator<FingerprintAndOffsetTuple>() {
        @Override
        public int compare(FingerprintAndOffsetTuple o1, FingerprintAndOffsetTuple o2) {
            return (o1._fingerprint < o2._fingerprint) ? -1 : o1._fingerprint > o2._fingerprint ? 1 : 0;
        }
    });

    // now we need to write the index out

    // allocate working set buffers ...
    ByteBuffer indexDataBuffer = ByteBuffer.allocate(offsetInfo.size() * 16);
    ByteBuffer indexHintsBuffer = ByteBuffer.allocate(
            ((((offsetInfo.size() + INDEX_HINT_RECORD_INTERVAL) / INDEX_HINT_RECORD_INTERVAL) + 1)
                    * INDEX_HINT_SIZE) + 4);

    // build index hints placeholder
    Vector<HDFSFileIndex.IndexItem> hints = new Vector<HDFSFileIndex.IndexItem>();

    for (int i = 0; i < offsetInfo.size(); ++i) {
        if (i % INDEX_HINT_RECORD_INTERVAL == 0 || (i == (offsetInfo.size() - 1))) {
            HDFSFileIndex.IndexItem hint = new IndexItem(offsetInfo.get(i)._fingerprint,
                    (int) offsetInfo.get(i)._offset);
            hints.add(hint);
            // add fingerprint to bloom filter
            bloomFilter.add(hint.fingerprint);
        }
    }

    // start off the index hints buffer with a hint of the index hint buffer size
    indexHintsBuffer.putInt(hints.size());

    // track total bits used ...
    int bitsUsedForHints = 0;
    int bitsUsedForFingerprints = 0;
    int bitsUsedForOffsets = 0;

    // now start populating index data ...
    for (int hintIdx = 0; hintIdx < hints.size(); ++hintIdx) {

        HDFSFileIndex.IndexItem hint = hints.get(hintIdx);

        LOG.info("IndexWriter FP:" + hint.fingerprint);
        indexHintsBuffer.putLong(hint.fingerprint);
        indexHintsBuffer.putInt(hint.dataOffset);
        indexHintsBuffer.putInt(indexDataBuffer.position());

        // update stats
        bitsUsedForHints += INDEX_HINT_SIZE * 8;

        if (hintIdx < hints.size() - 1) {
            // track cumulative delta and offset values (for average calc later)
            double cumilativeDelta = 0;
            long cumilativeOffset = 0;
            int subIndexItemCount = 0;
            int nonZeroDeltaCount = 0;

            Vector<HDFSFileIndex.IndexItem> subHints = new Vector<HDFSFileIndex.IndexItem>();

            // initialize last fingerprint to indexed value ...
            long lastFingerprint = hint.fingerprint;

            // first collect values in between index hints
            for (int nonIndexItem = (hintIdx * INDEX_HINT_RECORD_INTERVAL) + 1;
                    nonIndexItem < ((hintIdx + 1) * INDEX_HINT_RECORD_INTERVAL); ++nonIndexItem) {

                if (nonIndexItem >= offsetInfo.size())
                    break;

                // calculate fingerprint delta ...
                long fingerprintDelta = offsetInfo.get(nonIndexItem)._fingerprint - lastFingerprint;
                LOG.info("IndexWriter FP:" + offsetInfo.get(nonIndexItem)._fingerprint + " Delta:"
                        + fingerprintDelta);

                // offset delta
                if (fingerprintDelta != 0) {
                    cumilativeDelta += (double) fingerprintDelta;
                    LOG.info("Cumilative Delta is:" + cumilativeDelta);
                    nonZeroDeltaCount++;
                }

                cumilativeOffset += offsetInfo.get(nonIndexItem)._offset;
                ++subIndexItemCount;

                // add to collection vector
                subHints.add(new IndexItem(fingerprintDelta, (int) offsetInfo.get(nonIndexItem)._offset));

                // remember the last fingerprint ...
                lastFingerprint = offsetInfo.get(nonIndexItem)._fingerprint;

                // add item to bloom filter
                bloomFilter.add(lastFingerprint);
            }

            // calculate average delta value
            double averageDeltaValue = (double) cumilativeDelta / (double) nonZeroDeltaCount;
            // calculate m for fingerprint deltas
            int mForFingerprints = (int) Math.floor(lg(averageDeltaValue));
            LOG.info("Average Delta Value is:" + averageDeltaValue + " m is:" + mForFingerprints);

            // calculate average offset value
            double averageOffsetValue = (double) cumilativeOffset / (double) subIndexItemCount;
            // calculate m for offsets
            int mForOffsets = (int) Math.floor(lg(averageOffsetValue));

            // calculate rice codes
            RiceCoding riceCodeFP = new RiceCoding(mForFingerprints);
            RiceCoding riceCodeOffsets = new RiceCoding(mForOffsets);

            // populate bits
            for (HDFSFileIndex.IndexItem subItemHint : subHints) {
                if (subItemHint.fingerprint == 0) {
                    LOG.warn("Zero Delta for Fingerprint Detected. There are two duplicate entries in log!");
                }
                riceCodeFP.addItem(subItemHint.fingerprint + 1);
                riceCodeOffsets.addItem(subItemHint.dataOffset + 1);
            }

            // now track bits used ...
            bitsUsedForFingerprints += riceCodeFP.getNumBits();
            bitsUsedForOffsets += riceCodeOffsets.getNumBits();

            // write out metadata
            // save the current position
            int currentPosition = indexDataBuffer.position();

            // fingerprint data
            indexDataBuffer.put((byte) mForFingerprints);
            CacheManager.writeVLongToByteBuffer(indexDataBuffer, riceCodeFP.getNumBits());
            indexDataBuffer.put(riceCodeFP.getBits(), 0, (riceCodeFP.getNumBits() + 7) / 8);

            // offset data
            indexDataBuffer.put((byte) mForOffsets);
            CacheManager.writeVLongToByteBuffer(indexDataBuffer, riceCodeOffsets.getNumBits());
            indexDataBuffer.put(riceCodeOffsets.getBits(), 0, (riceCodeOffsets.getNumBits() + 7) / 8);

            System.out.println("Item Count:" + subIndexItemCount + "FP Bits:" + subIndexItemCount * 64
                    + " Compressed:" + riceCodeFP.getNumBits() + " Offset Bits:" + subIndexItemCount * 32
                    + " Compressed:" + riceCodeOffsets.getNumBits());
            LOG.info("Item Count:" + subIndexItemCount + "FP Bits:" + subIndexItemCount * 64
                    + " Compressed:" + riceCodeFP.getNumBits() + " Offset Bits:" + subIndexItemCount * 32
                    + " Compressed:" + riceCodeOffsets.getNumBits());

            if ((subIndexItemCount * 64) < riceCodeFP.getNumBits()) {
                throw new RuntimeException("Compressed Size > UnCompressed Size!!!!");
            }

            validateIndexData(indexDataBuffer.array(), currentPosition, hint.fingerprint, subHints,
                    bloomFilter);
        }
    }

    if (!bloomFilter.isPresent(firstFingerprint)) {
        throw new RuntimeException("Test Failed!");
    }

    // serialize bloom filter
    ByteStream baos = new ByteStream(1 << 12);
    BloomFilter.serializer().serialize(bloomFilter, new DataOutputStream(baos));

    // spit out final stats
    System.out.println(" Bloomfilter Size:" + baos.size() + " IndexHintBuffer Size:"
            + indexHintsBuffer.position() + " IndexDataBuffer Size:" + indexDataBuffer.position());

    // now write out the final index file ...
    // bloom filter data ...
    indexFileOut.write(baos.getBuffer(), 0, baos.size());
    // write hint data
    indexFileOut.write(indexHintsBuffer.array(), 0, indexHintsBuffer.position());
    // write out rice code data size
    indexFileOut.writeInt(indexDataBuffer.position());
    // finally rice coded sub-index data
    indexFileOut.write(indexDataBuffer.array(), 0, indexDataBuffer.position());
}
From source file:org.commoncrawl.util.TextBytes.java
/**
 * Serialize: write this object to out. The length uses zero-compressed encoding.
 *
 * @see Writable#write(DataOutput)
 */
public void write(DataOutput out) throws IOException {
    WritableUtils.writeVInt(out, bytes.getCount());
    if (bytes.getCount() != 0) {
        out.write(bytes.get(), bytes.getOffset(), bytes.getCount());
    }
}
From source file:org.godhuli.rhipe.RHBytesWritable.java
public void write(final DataOutput out) throws IOException {
    WritableUtils.writeVInt(out, size);
    out.write(bytes, 0, size);
}
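For symmetry, a sketch of the read path such a Writable would pair with (this readFields body is an assumption inferred from the write method above, not the RHBytesWritable source):

public void readFields(final DataInput in) throws IOException {
    size = WritableUtils.readVInt(in); // zero-compressed length prefix written above
    if (bytes == null || bytes.length < size) {
        bytes = new byte[size];
    }
    in.readFully(bytes, 0, size);
}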