Example usage for java.io DataOutputStream writeShort

Introduction

On this page you can find usage examples for java.io.DataOutputStream.writeShort.

Prototype

public final void writeShort(int v) throws IOException 

Document

Writes a short to the underlying output stream as two bytes, high byte first.
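
As a quick, self-contained illustration (not taken from any of the projects below), the following sketch shows the big-endian, two-byte encoding that writeShort produces; note that the argument is an int and only its low 16 bits are written:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteShortDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(baos);

        out.writeShort(0x1234); // written as 0x12, 0x34 (high byte first)
        out.writeShort(-1);     // only the low 16 bits are kept: 0xFF, 0xFF

        for (byte b : baos.toByteArray()) {
            System.out.printf("%02X ", b); // prints: 12 34 FF FF
        }
    }
}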

Usage

From source file: org.apache.hadoop.hbase.io.hfile.TestHFileBlock.java

private long writeBlocks(Random rand, Compression.Algorithm compressAlgo, Path path, List<Long> expectedOffsets,
        List<Long> expectedPrevOffsets, List<BlockType> expectedTypes, List<ByteBuffer> expectedContents)
        throws IOException {
    boolean cacheOnWrite = expectedContents != null;
    FSDataOutputStream os = fs.create(path);
    HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(true).withIncludesMvcc(includesMemstoreTS)
            .withIncludesTags(includesTag).withCompression(compressAlgo)
            .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
            .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE).build();
    HFileBlock.Writer hbw = new HFileBlock.Writer(null, meta);
    Map<BlockType, Long> prevOffsetByType = new HashMap<BlockType, Long>();
    long totalSize = 0;
    for (int i = 0; i < NUM_TEST_BLOCKS; ++i) {
        long pos = os.getPos();
        int blockTypeOrdinal = rand.nextInt(BlockType.values().length);
        if (blockTypeOrdinal == BlockType.ENCODED_DATA.ordinal()) {
            blockTypeOrdinal = BlockType.DATA.ordinal();
        }
        BlockType bt = BlockType.values()[blockTypeOrdinal];
        DataOutputStream dos = hbw.startWriting(bt);
        int size = rand.nextInt(500);
        for (int j = 0; j < size; ++j) {
            // This might compress well.
            dos.writeShort(i + 1);
            dos.writeInt(j + 1);
        }

        if (expectedOffsets != null)
            expectedOffsets.add(os.getPos());

        if (expectedPrevOffsets != null) {
            Long prevOffset = prevOffsetByType.get(bt);
            expectedPrevOffsets.add(prevOffset != null ? prevOffset : -1);
            prevOffsetByType.put(bt, os.getPos());
        }

        expectedTypes.add(bt);

        hbw.writeHeaderAndData(os);
        totalSize += hbw.getOnDiskSizeWithHeader();

        if (cacheOnWrite)
            expectedContents.add(hbw.getUncompressedBufferWithHeader());

        if (detailedLogging) {
            LOG.info("Written block #" + i + " of type " + bt + ", uncompressed size "
                    + hbw.getUncompressedSizeWithoutHeader() + " at offset " + pos);
        }
    }
    os.close();
    LOG.info("Created a temporary file at " + path + ", " + fs.getFileStatus(path).getLen()
            + " byte, compression=" + compressAlgo);
    return totalSize;
}

From source file: bobs.is.compress.sevenzip.SevenZOutputFile.java

private void writeFileNames(final DataOutput header) throws IOException {
    header.write(NID.kName); // property ID introducing the file names block

    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final DataOutputStream out = new DataOutputStream(baos);
    out.write(0); // "external" flag: 0 means the names are stored inline
    for (final SevenZArchiveEntry entry : files) {
        out.write(entry.getName().getBytes("UTF-16LE"));
        out.writeShort(0); // two-byte NUL terminator after each UTF-16LE name
    }
    }
    out.flush();
    final byte[] contents = baos.toByteArray();
    writeUint64(header, contents.length);
    header.write(contents);
}

From source file: org.apache.fontbox.ttf.TTFSubsetter.java

private void writeUint16(DataOutputStream out, int i) throws IOException {
    out.writeShort(i);
}

From source file: org.apache.fontbox.ttf.TTFSubsetter.java

private void writeSInt16(DataOutputStream out, short i) throws IOException {
    out.writeShort(i);
}
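
Both wrappers above can delegate to writeShort because it accepts an int and writes only the low 16 bits, so the unsigned value 0xFFFF and the signed value -1 produce the same two bytes. A minimal check (a hypothetical class, not part of TTFSubsetter):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;

public class ShortTruncationCheck {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream a = new ByteArrayOutputStream();
        ByteArrayOutputStream b = new ByteArrayOutputStream();
        new DataOutputStream(a).writeShort(0xFFFF); // unsigned 65535
        new DataOutputStream(b).writeShort(-1);     // signed -1
        // Both streams contain the same bytes: FF FF
        System.out.println(Arrays.equals(a.toByteArray(), b.toByteArray())); // true
    }
}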

From source file: org.apache.fontbox.ttf.TTFSubsetter.java

/**
 * @param out The data output stream.
 * @param nTables The number of tables.
 * @return The file offset of the first TTF table to write.
 * @throws IOException Upon errors.
 */
private long writeFileHeader(DataOutputStream out, int nTables) throws IOException {
    out.writeInt(0x00010000);
    out.writeShort(nTables);

    int mask = Integer.highestOneBit(nTables);
    int searchRange = mask * 16;
    out.writeShort(searchRange);

    int entrySelector = log2(mask);

    out.writeShort(entrySelector);

    // rangeShift = numTables * 16 - searchRange
    int last = 16 * nTables - searchRange;
    out.writeShort(last);

    return 0x00010000L + toUInt32(nTables, searchRange) + toUInt32(entrySelector, last);
}
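
As a concrete check of the header arithmetic above: with nTables = 10, Integer.highestOneBit(10) is 8, so searchRange = 8 * 16 = 128, entrySelector = log2(8) = 3, and the final field (rangeShift) = 10 * 16 - 128 = 32, matching the OpenType table directory definitions of these fields.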

From source file: com.joey.software.MoorFLSI.RepeatImageTextReader.java

public void saveData(File f) throws IOException {
    DataOutputStream out = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(f)));

    out.writeInt(imageData.size()); // number of frames
    out.writeInt(wide);
    out.writeInt(high);

    for (int i = 0; i < imageData.size(); i++) {
        System.out.println(i);
        out.writeLong(imageTime.get(i).getTime());
        for (int x = 0; x < wide; x++) {
            for (int y = 0; y < high; y++) {
                out.writeShort(imageData.get(i)[x][y]);
            }
        }
    }

    out.close();

}

From source file: org.apache.fontbox.ttf.TTFSubsetter.java

private void writeFixed(DataOutputStream out, double f) throws IOException {
    double ip = Math.floor(f);
    double fp = (f - ip) * 65536.0;
    out.writeShort((int) ip);
    out.writeShort((int) fp);
}
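
writeFixed stores a 16.16 fixed-point number as two shorts: for f = 1.5 the integer part 1 and the fraction 0x8000 go out as the bytes 00 01 80 00. A sketch of the inverse, assuming a hypothetical readFixed helper that is not part of TTFSubsetter:

private static double readFixed(DataInputStream in) throws IOException {
    int ip = in.readShort();          // signed 16-bit integer part
    int fp = in.readUnsignedShort();  // unsigned 16-bit fractional part
    return ip + fp / 65536.0;
}

Because writeFixed uses Math.floor, this round-trips negative values as well: -1.5 is written as ip = -2, fp = 0x8000, and read back as -2 + 0.5 = -1.5.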

From source file: org.apache.hadoop.hdfs.server.datanode.DataXceiver.java

/**
 * Get block checksum (MD5 of CRC32).
 * @param in the stream to read the request from
 */
void getBlockChecksum(DataInputStream in) throws IOException {
    final Block block = new Block(in.readLong(), 0, in.readLong());
    Token<BlockTokenIdentifier> accessToken = new Token<BlockTokenIdentifier>();
    accessToken.readFields(in);
    DataOutputStream out = new DataOutputStream(NetUtils.getOutputStream(s, datanode.socketWriteTimeout));
    if (datanode.isBlockTokenEnabled) {
        try {
            datanode.blockTokenSecretManager.checkAccess(accessToken, null, block,
                    BlockTokenSecretManager.AccessMode.READ);
        } catch (InvalidToken e) {
            try {
                out.writeShort(DataTransferProtocol.OP_STATUS_ERROR_ACCESS_TOKEN);
                out.flush();
                throw new IOException("Access token verification failed, for client " + remoteAddress
                        + " for OP_BLOCK_CHECKSUM for block " + block);
            } finally {
                IOUtils.closeStream(out);
            }
        }
    }

    final MetaDataInputStream metadataIn = datanode.data.getMetaDataInputStream(block);
    final DataInputStream checksumIn = new DataInputStream(new BufferedInputStream(metadataIn, BUFFER_SIZE));

    try {
        //read metadata file
        final BlockMetadataHeader header = BlockMetadataHeader.readHeader(checksumIn);
        final DataChecksum checksum = header.getChecksum();
        final int bytesPerCRC = checksum.getBytesPerChecksum();
        final long crcPerBlock = (metadataIn.getLength() - BlockMetadataHeader.getHeaderSize())
                / checksum.getChecksumSize();

        //compute block checksum
        final MD5Hash md5 = MD5Hash.digest(checksumIn);

        if (LOG.isDebugEnabled()) {
            LOG.debug("block=" + block + ", bytesPerCRC=" + bytesPerCRC + ", crcPerBlock=" + crcPerBlock
                    + ", md5=" + md5);
        }

        //write reply
        out.writeShort(DataTransferProtocol.OP_STATUS_SUCCESS);
        out.writeInt(bytesPerCRC);
        out.writeLong(crcPerBlock);
        md5.write(out);
        out.flush();
    } finally {
        IOUtils.closeStream(out);
        IOUtils.closeStream(checksumIn);
        IOUtils.closeStream(metadataIn);
    }
}

From source file: org.apache.hadoop.hdfs.server.datanode.DataXceiver.java

/**
 * Read a block from the disk.
 * @param in The stream to read from
 * @throws IOException
 */
private void readBlock(DataInputStream in) throws IOException {
    //
    // Read in the header
    //
    long blockId = in.readLong();
    Block block = new Block(blockId, 0, in.readLong());

    long startOffset = in.readLong();
    long length = in.readLong();
    String clientName = Text.readString(in);
    Token<BlockTokenIdentifier> accessToken = new Token<BlockTokenIdentifier>();
    accessToken.readFields(in);
    OutputStream baseStream = NetUtils.getOutputStream(s, datanode.socketWriteTimeout);
    DataOutputStream out = new DataOutputStream(new BufferedOutputStream(baseStream, SMALL_BUFFER_SIZE));

    if (datanode.isBlockTokenEnabled) {
        try {
            datanode.blockTokenSecretManager.checkAccess(accessToken, null, block,
                    BlockTokenSecretManager.AccessMode.READ);
        } catch (InvalidToken e) {
            try {
                out.writeShort(DataTransferProtocol.OP_STATUS_ERROR_ACCESS_TOKEN);
                out.flush();
                throw new IOException("Access token verification failed, for client " + remoteAddress
                        + " for OP_READ_BLOCK for block " + block);
            } finally {
                IOUtils.closeStream(out);
            }
        }
    }
    // send the block
    BlockSender blockSender = null;
    final String clientTraceFmt = clientName.length() > 0 && ClientTraceLog.isInfoEnabled()
            ? String.format(DN_CLIENTTRACE_FORMAT, localAddress, remoteAddress, "%d", "HDFS_READ", clientName,
                    "%d", datanode.dnRegistration.getStorageID(), block, "%d")
            : datanode.dnRegistration + " Served block " + block + " to " + s.getInetAddress();
    try {
        try {
            blockSender = new BlockSender(block, startOffset, length, true, true, false, datanode,
                    clientTraceFmt);
        } catch (IOException e) {
            out.writeShort(DataTransferProtocol.OP_STATUS_ERROR);
            throw e;
        }

        out.writeShort(DataTransferProtocol.OP_STATUS_SUCCESS); // send op status
        long read = blockSender.sendBlock(out, baseStream, null); // send data

        if (blockSender.isBlockReadFully()) {
            // See if client verification succeeded. 
            // This is an optional response from client.
            try {
                if (in.readShort() == DataTransferProtocol.OP_STATUS_CHECKSUM_OK
                        && datanode.blockScanner != null) {
                    datanode.blockScanner.verifiedByClient(block);
                }
            } catch (IOException ignored) {
            }
        }

        datanode.myMetrics.incrBytesRead((int) read);
        datanode.myMetrics.incrBlocksRead();
    } catch (SocketException ignored) {
        // It's ok for the remote side to close the connection at any time.
        datanode.myMetrics.incrBlocksRead();
    } catch (IOException ioe) {
        /* What exactly should we do here?
         * Earlier versions shut down the datanode on a disk error.
         */
        LOG.warn(datanode.dnRegistration + ":Got exception while serving " + block + " to " + s.getInetAddress()
                + ":\n" + StringUtils.stringifyException(ioe));
        throw ioe;
    } finally {
        IOUtils.closeStream(out);
        IOUtils.closeStream(blockSender);
    }
}

From source file: org.apache.fop.render.pcl.PCLGenerator.java

/**
 * Generates a user-defined pattern for a dithering pattern matching the grayscale value
 * of the color given.
 * @param col the color to create the pattern for
 * @param patternID the pattern ID to use
 * @param ditherMatrixSize the size of the Bayer dither matrix to use (4 or 8 supported)
 * @throws IOException In case of an I/O error
 */
public void defineGrayscalePattern(Color col, int patternID, int ditherMatrixSize) throws IOException {
    ByteArrayOutputStream baout = new ByteArrayOutputStream();
    DataOutputStream data = new DataOutputStream(baout);
    data.writeByte(0); //Format
    data.writeByte(0); //Continuation
    data.writeByte(1); //Pixel Encoding
    data.writeByte(0); //Reserved
    data.writeShort(8); //Width in Pixels
    data.writeShort(8); //Height in Pixels
    //data.writeShort(600); //X Resolution (didn't manage to get that to work)
    //data.writeShort(600); //Y Resolution
    int gray255 = convertToGray(col.getRed(), col.getGreen(), col.getBlue());

    byte[] pattern;
    if (ditherMatrixSize == 8) {
        pattern = DitherUtil.getBayerDither(DitherUtil.DITHER_MATRIX_8X8, gray255, false);
    } else {
        //Since a 4x4 pattern did not work, the 4x4 pattern is applied 4 times to an
        //8x8 pattern. Maybe this could be changed to use an 8x8 bayer dither pattern
        //instead of the 4x4 one.
        pattern = DitherUtil.getBayerDither(DitherUtil.DITHER_MATRIX_4X4, gray255, true);
    }
    data.write(pattern);
    if ((baout.size() % 2) > 0) {
        baout.write(0);
    }
    writeCommand("*c" + patternID + "G");
    writeCommand("*c" + baout.size() + "W");
    baout.writeTo(this.out);
    writeCommand("*c4Q"); //temporary pattern
}