Example usage for java.io DataOutput writeByte

List of usage examples for java.io DataOutput writeByte

Introduction

On this page you can find usage examples for java.io DataOutput writeByte.

Prototype

void writeByte(int v) throws IOException;

Document

Writes to the output stream the eight low-order bits of the argument v.
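
For instance, a value wider than eight bits is simply truncated to its low byte. A minimal self-contained sketch:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteByteDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buffer);

        // Only the eight low-order bits are kept: 0x1FF (511) becomes 0xFF.
        out.writeByte(0x1FF);
        out.close();

        DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
        System.out.println(in.readUnsignedByte()); // prints 255
    }
}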

Usage

From source file:org.openxdata.server.service.impl.FormDownloadServiceTest.java

@Test
@Ignore("throws too many exceptions")
public void testSubmitForms_noSerializer() throws Exception {

    // create the stream
    final PipedOutputStream pout = new PipedOutputStream();
    DataInputStream in = new DataInputStream(new PipedInputStream(pout));
    Thread thread = new Thread(new Runnable() {
        @Override
        public void run() {
            DataOutput output = new DataOutputStream(pout);
            try {
                output.writeByte(1);
                output.writeUTF(XFormsFixture.getSampleFormModelData());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    });
    thread.start();
    DataOutputStream out = new DataOutputStream(new ByteArrayOutputStream());

    // run test
    formDownloadService.submitForms(in, out, null);

    // do checks afterwards
    List<FormDataHeader> formData = studyManagerService.getFormData(12, null, null, null);
    Assert.assertEquals("after submit there is 1 form data", 1, formData.size());
}
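
The writer thread above produces a stream that begins with a one-byte marker followed by a UTF-encoded form. Whatever submitForms does with it internally, the stream itself decodes with the mirror-image DataInput calls; a self-contained sketch with a placeholder payload:

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

public class PipeDecodeSketch {
    public static void main(String[] args) throws IOException {
        final PipedOutputStream pout = new PipedOutputStream();
        DataInputStream in = new DataInputStream(new PipedInputStream(pout));
        new Thread(() -> {
            try (DataOutputStream output = new DataOutputStream(pout)) {
                output.writeByte(1);          // status/marker byte
                output.writeUTF("<xforms/>"); // placeholder payload
            } catch (IOException e) {
                e.printStackTrace();
            }
        }).start();

        System.out.println(in.readByte()); // 1
        System.out.println(in.readUTF());  // <xforms/>
    }
}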

From source file:com.marklogic.contentpump.ContentWithFileNameWritable.java

@Override
public void write(DataOutput out) throws IOException {
    Text.writeString(out, fileName);
    out.writeByte(type);
    if (value instanceof Text) {
        ((Text) value).write(out);
    } else if (value instanceof MarkLogicNode) {
        ((MarkLogicNode) value).write(out);
    } else if (value instanceof BytesWritable) {
        ((BytesWritable) value).write(out);
    }
}

From source file:com.marklogic.contentpump.RDFWritable.java

@Override
public void write(DataOutput out) throws IOException {
    if (graphUri == null) {
        out.writeByte(0);
    } else {
        out.writeByte(1);
        Text t = new Text(graphUri);
        t.write(out);
    }
    out.writeByte(type);
    if (value instanceof Text) {
        ((Text) value).write(out);
    } else if (value instanceof MarkLogicNode) {
        ((MarkLogicNode) value).write(out);
    } else if (value instanceof BytesWritable) {
        ((BytesWritable) value).write(out);
    }
    //serialize permissions
    if (permissions == null) {
        out.writeByte(0);
    } else {
        out.writeByte(permissions.length);
        for (int i = 0; i < permissions.length; i++) {
            Text role = new Text(permissions[i].getRole());
            Text cap = new Text(permissions[i].getCapability().toString());
            role.write(out);
            cap.write(out);
        }
    }
}

From source file:libra.common.hadoop.io.datatypes.CompressedIntArrayWritable.java

@Override
public void write(DataOutput out) throws IOException {
    int count = this.intArray.length;
    byte flag = makeFlag(count, this.intArray);
    out.writeByte(flag);

    if ((flag & 0x0f) == 0x00) {
        out.writeByte(count);
    } else if ((flag & 0x0f) == 0x01) {
        out.writeShort(count);
    } else if ((flag & 0x0f) == 0x02) {
        out.writeInt(count);
    } else {
        throw new IOException("unhandled flag");
    }

    if ((flag & 0xf0) == 0x00) {
        for (int i = 0; i < count; i++) {
            out.writeByte((byte) this.intArray[i]);
        }
    } else if ((flag & 0xf0) == 0x10) {
        for (int i = 0; i < count; i++) {
            out.writeShort((short) this.intArray[i]);
        }
    } else if ((flag & 0xf0) == 0x20) {
        for (int i = 0; i < count; i++) {
            out.writeInt((int) this.intArray[i]);
        }
    } else {
        throw new IOException("unhandled flag");
    }
}
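
makeFlag itself is not shown, but the branches above fix its contract: the low nibble selects the width of the count field (0x00 byte, 0x01 short, 0x02 int) and the high nibble the width of each element (0x00, 0x10, 0x20). A hedged sketch consistent with that contract; libra's actual implementation may differ:

public class FlagSketch {
    // Low nibble: width code for the count; high nibble: width code for elements.
    static byte makeFlag(int count, int[] values) {
        int countCode = count <= Byte.MAX_VALUE ? 0x00
                : count <= Short.MAX_VALUE ? 0x01 : 0x02;
        int maxAbs = 0;
        for (int v : values) {
            maxAbs = Math.max(maxAbs, Math.abs(v));
        }
        int elemCode = maxAbs <= Byte.MAX_VALUE ? 0x00
                : maxAbs <= Short.MAX_VALUE ? 0x10 : 0x20;
        return (byte) (countCode | elemCode);
    }

    public static void main(String[] args) {
        // 3 values fit a byte count; 200 needs a short element width -> prints 0x10.
        System.out.printf("0x%02x%n", makeFlag(3, new int[] { 1, 200, -5 }));
    }
}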

From source file:com.mapr.hbase.support.objects.MHRegionInfo.java

@Override
public void write(DataOutput out) throws IOException {
    out.writeByte(getVersion());
    Bytes.writeByteArray(out, endKey);
    out.writeBoolean(offLine);
    out.writeLong(regionId);
    Bytes.writeByteArray(out, regionName);
    out.writeBoolean(split);
    Bytes.writeByteArray(out, startKey);
    if (getVersion() == 0) {
        tableDesc.write(out);
    } else {
        Bytes.writeByteArray(out, tableName);
    }
    out.writeInt(hashCode);
}

From source file:cn.iie.haiep.hbase.value.Bytes.java

/**
 * Writes a string as a fixed-size field, padded with zeros.
 */
public static void writeStringFixedSize(final DataOutput out, String s, int size) throws IOException {
    byte[] b = toBytes(s);
    if (b.length > size) {
        throw new IOException("Trying to write " + b.length + " bytes (" + toStringBinary(b)
                + ") into a field of length " + size);
    }

    out.writeBytes(s);
    for (int i = 0; i < size - s.length(); ++i)
        out.writeByte(0);
}
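
One caveat worth noting: the size check above measures the encoded byte array b (assuming toBytes produces UTF-8, as HBase's Bytes.toBytes does), while writeBytes(s) and the padding loop count chars. The two agree only for single-byte characters; a quick illustration:

import java.nio.charset.StandardCharsets;

public class FixedSizeCaveat {
    public static void main(String[] args) {
        String s = "héllo";
        byte[] b = s.getBytes(StandardCharsets.UTF_8);
        // 6 bytes vs 5 chars: for such input the written field would not
        // be exactly `size` bytes long.
        System.out.println(b.length + " bytes vs " + s.length() + " chars");
    }
}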

From source file:it.unimi.dsi.sux4j.mph.CHDMinimalPerfectHashFunction.java

/**
 * Creates a new CHD minimal perfect hash function for the given keys.
 *
 * @param keys the keys to hash, or {@code null}.
 * @param transform a transformation strategy for the keys.
 * @param lambda the average bucket size.
 * @param loadFactor the load factor.
 * @param signatureWidth a signature width, or 0 for no signature.
 * @param tempDir a temporary directory for the store files, or {@code null} for the standard temporary directory.
 * @param chunkedHashStore a chunked hash store containing the keys, or {@code null}; the store
 * can be unchecked, but in this case <code>keys</code> and <code>transform</code> must be non-{@code null}. 
 */
protected CHDMinimalPerfectHashFunction(final Iterable<? extends T> keys,
        final TransformationStrategy<? super T> transform, final int lambda, double loadFactor,
        final int signatureWidth, final File tempDir, ChunkedHashStore<T> chunkedHashStore) throws IOException {
    this.transform = transform;

    final ProgressLogger pl = new ProgressLogger(LOGGER);
    pl.displayLocalSpeed = true;
    pl.displayFreeMemory = true;
    final RandomGenerator r = new XorShift1024StarRandomGenerator();
    pl.itemsName = "keys";

    final boolean givenChunkedHashStore = chunkedHashStore != null;
    if (!givenChunkedHashStore) {
        chunkedHashStore = new ChunkedHashStore<T>(transform, tempDir, pl);
        chunkedHashStore.reset(r.nextLong());
        chunkedHashStore.addAll(keys.iterator());
    }
    n = chunkedHashStore.size();

    defRetValue = -1; // For the very few cases in which we can decide

    int log2NumChunks = Math.max(0, Fast.mostSignificantBit(n >> LOG2_CHUNK_SIZE));
    chunkShift = chunkedHashStore.log2Chunks(log2NumChunks);
    final int numChunks = 1 << log2NumChunks;

    LOGGER.debug("Number of chunks: " + numChunks);
    LOGGER.debug("Average chunk size: " + (double) n / numChunks);

    offsetNumBucketsSeed = new long[(numChunks + 1) * 3 + 2];

    int duplicates = 0;
    final LongArrayList holes = new LongArrayList();

    @SuppressWarnings("resource")
    final OfflineIterable<MutableLong, MutableLong> coefficients = new OfflineIterable<MutableLong, MutableLong>(
            new Serializer<MutableLong, MutableLong>() {

                @Override
                public void write(final MutableLong a, final DataOutput dos) throws IOException {
                    long x = a.longValue();
                    // Variable-length encoding: seven payload bits per byte,
                    // with the high bit set on every byte except the last.
                    while ((x & ~0x7FL) != 0) {
                        dos.writeByte((int) (x | 0x80));
                        x >>>= 7;
                    }
                    dos.writeByte((int) x);
                }

                @Override
                public void read(final DataInput dis, final MutableLong x) throws IOException {
                    byte b = dis.readByte();
                    long t = b & 0x7F;
                    for (int shift = 7; (b & 0x80) != 0; shift += 7) {
                        b = dis.readByte();
                        t |= (b & 0x7FL) << shift;
                    }
                    x.setValue(t);
                }
            }, new MutableLong());

    for (;;) {
        LOGGER.debug("Generating minimal perfect hash function...");

        holes.clear();
        coefficients.clear();
        pl.expectedUpdates = numChunks;
        pl.itemsName = "chunks";
        pl.start("Analysing chunks... ");

        try {
            int chunkNumber = 0;

            for (ChunkedHashStore.Chunk chunk : chunkedHashStore) {
                /* We treat a chunk as a single hash function. The number of bins is thus
                 * the first prime larger than the chunk size divided by the load factor. */
                final int p = Primes.nextPrime((int) Math.ceil(chunk.size() / loadFactor) + 1);
                final boolean used[] = new boolean[p];

                final int numBuckets = (chunk.size() + lambda - 1) / lambda;
                numBuckets(chunkNumber + 1, numBuckets(chunkNumber) + numBuckets);
                final int[] cc0 = new int[numBuckets];
                final int[] cc1 = new int[numBuckets];
                @SuppressWarnings("unchecked")
                final ArrayList<long[]>[] bucket = new ArrayList[numBuckets];
                for (int i = bucket.length; i-- != 0;)
                    bucket[i] = new ArrayList<long[]>();

                tryChunk: for (;;) {
                    for (ArrayList<long[]> b : bucket)
                        b.clear();
                    Arrays.fill(used, false);

                    /* At each try, the allocation of keys to buckets is randomized differently. */
                    final long seed = r.nextLong();
                    // System.err.println( "Number of keys: " + chunk.size()  + " Number of bins: " + p + " seed: " + seed );
                    /* We distribute the keys in this chunk among the buckets. */
                    for (Iterator<long[]> iterator = chunk.iterator(); iterator.hasNext();) {
                        final long[] triple = iterator.next();
                        final long[] h = new long[3];
                        Hashes.spooky4(triple, seed, h);
                        final ArrayList<long[]> b = bucket[(int) ((h[0] >>> 1) % numBuckets)];
                        h[1] = (int) ((h[1] >>> 1) % p);
                        h[2] = (int) ((h[2] >>> 1) % (p - 1)) + 1;

                        // All elements in a bucket must have either different h[ 1 ] or different h[ 2 ]
                        for (long[] t : b)
                            if (t[1] == h[1] && t[2] == h[2]) {
                                LOGGER.info("Duplicate index " + Arrays.toString(t));
                                continue tryChunk;
                            }
                        b.add(h);
                    }

                    final int[] perm = Util.identity(bucket.length);
                    IntArrays.quickSort(perm, new AbstractIntComparator() {
                        private static final long serialVersionUID = 1L;

                        @Override
                        public int compare(int a0, int a1) {
                            return Integer.compare(bucket[a1].size(), bucket[a0].size());
                        }
                    });

                    for (int i = 0; i < perm.length;) {
                        final LinkedList<Integer> bucketsToDo = new LinkedList<Integer>();
                        final int size = bucket[perm[i]].size();
                        //System.err.println( "Bucket size: " + size );
                        int j;
                        // Gather indices of all buckets with the same size
                        for (j = i; j < perm.length && bucket[perm[j]].size() == size; j++)
                            bucketsToDo.add(Integer.valueOf(perm[j]));

                        // Examine for each pair (c0,c1) the buckets still to do
                        ext: for (int c1 = 0; c1 < p; c1++)
                            for (int c0 = 0; c0 < p; c0++) {
                                //System.err.println( "Testing " + c0 + ", " + c1 + " (to do: " + bucketsToDo.size() + ")" );
                                for (Iterator<Integer> iterator = bucketsToDo.iterator(); iterator.hasNext();) {
                                    final int k = iterator.next().intValue();
                                    final ArrayList<long[]> b = bucket[k];
                                    boolean completed = true;
                                    final IntArrayList done = new IntArrayList();
                                    // Try to see whether the necessary entries are not used
                                    for (long[] h : b) {
                                        //assert k == h[ 0 ];

                                        int pos = (int) ((h[1] + c0 * h[2] + c1) % p);
                                        //System.err.println( "Testing pos " + pos + " for " + Arrays.toString( e  ));
                                        if (used[pos]) {
                                            completed = false;
                                            break;
                                        } else {
                                            used[pos] = true;
                                            done.add(pos);
                                        }
                                    }

                                    if (completed) {
                                        // All positions were free
                                        cc0[k] = c0;
                                        cc1[k] = c1;
                                        iterator.remove();
                                    } else
                                        for (int d : done)
                                            used[d] = false;
                                }
                                if (bucketsToDo.isEmpty())
                                    break ext;
                            }
                        if (!bucketsToDo.isEmpty())
                            continue tryChunk;

                        seed(chunkNumber, seed);
                        i = j;
                    }
                    break;
                }

                // System.err.println("DONE!");

                if (ASSERTS) {
                    final IntOpenHashSet pos = new IntOpenHashSet();
                    final long h[] = new long[3];
                    for (Iterator<long[]> iterator = chunk.iterator(); iterator.hasNext();) {
                        final long[] triple = iterator.next();
                        Hashes.spooky4(triple, seed(chunkNumber), h);
                        h[0] = (h[0] >>> 1) % numBuckets;
                        h[1] = (int) ((h[1] >>> 1) % p);
                        h[2] = (int) ((h[2] >>> 1) % (p - 1)) + 1;
                        //System.err.println( Arrays.toString(  e  ) );
                        assert pos.add((int) ((h[1] + cc0[(int) (h[0])] * h[2] + cc1[(int) (h[0])]) % p));
                    }
                }

                final MutableLong l = new MutableLong();
                for (int i = 0; i < numBuckets; i++) {
                    l.setValue(cc0[i] + cc1[i] * p);
                    coefficients.add(l);
                }

                for (int i = 0; i < p; i++)
                    if (!used[i])
                        holes.add(offset(chunkNumber) + i);

                offset(chunkNumber + 1, offset(chunkNumber) + p);
                chunkNumber++;
                pl.update();
            }

            pl.done();
            break;
        } catch (ChunkedHashStore.DuplicateException e) {
            if (keys == null)
                throw new IllegalStateException(
                        "You provided no keys, but the chunked hash store was not checked");
            if (duplicates++ > 3)
                throw new IllegalArgumentException("The input list contains duplicates");
            LOGGER.warn("Found duplicate. Recomputing triples...");
            chunkedHashStore.reset(r.nextLong());
            chunkedHashStore.addAll(keys.iterator());
        }
    }

    rank = new SparseRank(offset(offsetNumBucketsSeed.length / 3 - 1), holes.size(), holes.iterator());

    globalSeed = chunkedHashStore.seed();

    this.coefficients = new EliasFanoLongBigList(new AbstractLongIterator() {
        final OfflineIterator<MutableLong, MutableLong> iterator = coefficients.iterator();

        @Override
        public boolean hasNext() {
            return iterator.hasNext();
        }

        public long nextLong() {
            return iterator.next().longValue();
        }
    }, 0, true);

    coefficients.close();

    LOGGER.info("Completed.");
    LOGGER.info("Actual bit cost per key: " + (double) numBits() / n);

    if (signatureWidth != 0) {
        signatureMask = -1L >>> Long.SIZE - signatureWidth;
        (signatures = LongArrayBitVector.getInstance().asLongBigList(signatureWidth)).size(n);
        pl.expectedUpdates = n;
        pl.itemsName = "signatures";
        pl.start("Signing...");
        for (ChunkedHashStore.Chunk chunk : chunkedHashStore) {
            Iterator<long[]> iterator = chunk.iterator();
            for (int i = chunk.size(); i-- != 0;) {
                final long[] triple = iterator.next();
                long t = getLongByTripleNoCheck(triple);
                signatures.set(t, signatureMask & triple[0]);
                pl.lightUpdate();
            }
        }
        pl.done();
    } else {
        signatureMask = 0;
        signatures = null;
    }

    if (!givenChunkedHashStore)
        chunkedHashStore.close();
}
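
The anonymous Serializer inside this constructor stores each coefficient as a variable-length quantity: writeByte emits seven payload bits at a time, with the high bit marking continuation, so 300 encodes as the two bytes 0xAC 0x02. The same encode/decode pair extracted into a runnable sketch:

import java.io.*;

public class VarintSketch {
    static void writeVarint(DataOutput out, long x) throws IOException {
        while ((x & ~0x7FL) != 0) {
            out.writeByte((int) (x | 0x80)); // low 7 bits, continuation bit set
            x >>>= 7;
        }
        out.writeByte((int) x);              // final byte, high bit clear
    }

    static long readVarint(DataInput in) throws IOException {
        byte b = in.readByte();
        long t = b & 0x7F;
        for (int shift = 7; (b & 0x80) != 0; shift += 7) {
            b = in.readByte();
            t |= (b & 0x7FL) << shift;
        }
        return t;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        writeVarint(new DataOutputStream(buf), 300);
        byte[] bytes = buf.toByteArray(); // { (byte) 0xAC, 0x02 }
        System.out.println(readVarint(new DataInputStream(
                new ByteArrayInputStream(bytes)))); // 300
    }
}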

From source file:com.marklogic.tree.ExpandedTree.java

@Override
public void write(DataOutput out) throws IOException {
    out.writeLong(uriKey);
    out.writeLong(uniqKey);
    out.writeLong(linkKey);
    out.writeInt(numKeys);
    if (numKeys > 0) {
        for (long key : keys) {
            out.writeLong(key);
        }
    }
    if (atomData != null && atomData.length > 0) {
        out.writeInt(atomData.length);
        for (int i = 0; i < atomData.length; i++) {
            out.writeByte(atomData[i]);
        }
    } else {
        out.writeInt(0);
    }
    out.writeInt(atomLimit);
    if (atomIndex != null && atomIndex.length > 0) {
        for (int i = 0; i < atomIndex.length; i++) {
            out.writeInt(atomIndex[i]);
        }
    }
    if (nodeNameNameAtom != null && nodeNameNameAtom.length > 0) {
        out.writeInt(nodeNameNameAtom.length);
        for (int i = 0; i < nodeNameNameAtom.length; i++) {
            out.writeInt(nodeNameNameAtom[i]);
            out.writeInt(nodeNameNamespaceAtom[i]);
        }
    } else {
        out.writeInt(0);
    }
    out.writeInt(numNodeReps);
    if (numNodeReps > 0) {
        for (int i = 0; i < numNodeReps; i++) {
            out.writeLong(nodeOrdinal[i]);
            out.writeByte(nodeKind[i]);
            out.writeInt(nodeRepID[i]);
            out.writeInt(nodeParentNodeRepID[i]);
        }
    }
    if (elemNodeNodeNameRepID != null && elemNodeNodeNameRepID.length > 0) {
        out.writeInt(elemNodeNodeNameRepID.length);
        for (int i = 0; i < elemNodeNodeNameRepID.length; i++) {
            out.writeInt(elemNodeNodeNameRepID[i]);
            out.writeInt(elemNodeAttrNodeRepID[i]);
            out.writeInt(elemNodeChildNodeRepID[i]);
            out.writeInt(elemNodeElemDeclRepID[i]);
            out.writeInt(elemNodeNumAttributes[i]);
            out.writeInt(elemNodeNumDefaultAttrs[i]);
            out.writeInt(elemNodeNumChildren[i]);
            out.writeInt(elemNodeFlags[i]);
        }
    } else {
        out.writeInt(0);
    }
    if (attrNodeNodeNameRepID != null && attrNodeNodeNameRepID.length > 0) {
        out.writeInt(attrNodeNodeNameRepID.length);
        for (int i = 0; i < attrNodeNodeNameRepID.length; i++) {
            out.writeInt(attrNodeNodeNameRepID[i]);
            out.writeInt(attrNodeTextRepID[i]);
            out.writeInt(attrNodeAttrDeclRepID[i]);
        }
    } else {
        out.writeInt(0);
    }
    out.writeInt(numLinkNodeReps);
    if (numLinkNodeReps > 0) {
        for (int i = 0; i < numLinkNodeReps; i++) {
            out.writeLong(linkNodeKey[i]);
            out.writeLong(linkNodeNodeCount[i]);
            out.writeInt(linkNodeNodeNameRepID[i]);
            out.writeInt(linkNodeNodeRepID[i]);
        }
    }
    if (docNodeTextRepID != null && docNodeTextRepID.length > 0) {
        out.writeInt(docNodeTextRepID.length);
        for (int i = 0; i < docNodeTextRepID.length; i++) {
            out.writeInt(docNodeTextRepID[i]);
            out.writeInt(docNodeChildNodeRepID[i]);
            out.writeInt(docNodeNumChildren[i]);
        }
    } else {
        out.writeInt(0);
    }
    if (piNodeTargetAtom != null && piNodeTargetAtom.length > 0) {
        out.writeInt(piNodeTargetAtom.length);
        for (int i = 0; i < piNodeTargetAtom.length; i++) {
            out.writeInt(piNodeTargetAtom[i]);
            out.writeInt(piNodeTextRepID[i]);
        }
    } else {
        out.writeInt(0);
    }
    out.writeInt(numNSNodeReps);
    if (numNSNodeReps > 0) {
        for (int i = 0; i < numNSNodeReps; i++) {
            out.writeLong(nsNodeOrdinal[i]);
            out.writeInt(nsNodePrevNSNodeRepID[i]);
            out.writeInt(nsNodePrefixAtom[i]);
            out.writeInt(nsNodeUriAtom[i]);
        }
    }
    // skip permission node since it's not exposed to the API
    out.writeInt(uriTextRepID);
    out.writeInt(colsTextRepID);
    out.writeInt(numTextReps);
    if (numTextReps > 0) {
        for (int i = 0; i < numTextReps; i++) {
            out.writeInt(textReps[i]);
        }
    }
    if (arrayNodeTextRepID != null && arrayNodeTextRepID.length > 0) {
        out.writeInt(arrayNodeTextRepID.length);
        for (int i = 0; i < arrayNodeTextRepID.length; i++) {
            out.writeInt(arrayNodeTextRepID[i]);
            out.writeInt(arrayNodeChildNodeRepID[i]);
            out.writeInt(arrayNodeNumChildren[i]);
        }
    } else {
        out.writeInt(0);
    }
    if (doubles != null && doubles.length > 0) {
        out.writeInt(doubles.length);
        for (int i = 0; i < doubles.length; i++) {
            out.writeDouble(doubles[i]);
        }
    } else {
        out.writeInt(0);
    }

}

From source file:dk.statsbiblioteket.util.LineReaderTest.java

public void writeSample(DataOutput out) throws Exception {
    out.writeInt(12345);
    out.writeInt(-87);
    out.writeLong(123456789L);
    out.write("Hello World!\n".getBytes("utf-8"));
    out.write("Another world\n".getBytes("utf-8"));
    out.writeFloat(0.5f);
    out.writeBoolean(true);
    out.writeBoolean(false);
    out.writeByte(12);
    out.writeByte(-12);
    out.write(129);
    out.writeShort(-4567);
    out.writeBytes("ASCII");
}
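
Note that out.writeByte(-12) and out.write(129) each emit a single byte (both keep only the low eight bits); what comes back depends on the read call, since readByte() sign-extends while readUnsignedByte() returns 0..255. A quick round-trip sketch:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ByteSignSketch {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buf);
        out.writeByte(-12); // stored as 0xF4
        out.write(129);     // stored as 0x81; write(int) also keeps the low 8 bits

        DataInputStream in = new DataInputStream(
                new ByteArrayInputStream(buf.toByteArray()));
        System.out.println(in.readByte());         // -12 (sign-extended)
        System.out.println(in.readUnsignedByte()); // 129
    }
}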

From source file:com.ibm.bi.dml.runtime.matrix.data.MatrixBlock.java

/**
 * @param out
 * @throws IOException
 */
private void writeDenseBlock(DataOutput out) throws IOException {
    out.writeByte(BlockType.DENSE_BLOCK.ordinal());

    int limit = rlen * clen;
    if (out instanceof MatrixBlockDataOutput) //fast serialize
        ((MatrixBlockDataOutput) out).writeDoubleArray(limit, denseBlock);
    else //general case (if fast serialize not supported)
        for (int i = 0; i < limit; i++)
            out.writeDouble(denseBlock[i]);
}