Example usage for java.io DataInputStream readInt

List of usage examples for java.io DataInputStream readInt

Introduction

On this page you can find example usage for java.io DataInputStream readInt.

Prototype

public final int readInt() throws IOException 

Document

See the general contract of the readInt method of DataInput.
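
readInt reads four input bytes, high byte first (big-endian), and returns the resulting int; it blocks until the bytes are available and throws EOFException if the stream ends before four bytes have been read. As a minimal, self-contained sketch (the class name and values are illustrative, not part of the library):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ReadIntExample {
    public static void main(String[] args) throws IOException {
        // Write two ints with DataOutputStream (four big-endian bytes each)...
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(baos)) {
            out.writeInt(42);
            out.writeInt(-7);
        }
        // ...and read them back in the same order with readInt().
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
            System.out.println(in.readInt()); // 42
            System.out.println(in.readInt()); // -7
        }
    }
}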

Usage

From source file:com.linkedin.pinot.common.utils.DataTable.java

private Map<String, String> deserializeMetadata(byte[] buffer) {
    Map<String, String> map = new HashMap<String, String>();
    try {
        final ByteArrayInputStream bais = new ByteArrayInputStream(buffer);
        final DataInputStream in = new DataInputStream(bais);
        int size = in.readInt();
        for (int i = 0; i < size; i++) {
            Integer keyLength = in.readInt();
            byte[] keyBytes = new byte[keyLength];
            in.read(keyBytes);
            int valueLength = in.readInt();
            byte[] valueBytes = new byte[valueLength];
            in.read(valueBytes);
            map.put(new String(keyBytes, UTF8), new String(valueBytes, UTF8));
        }
    } catch (Exception e) {
        LOGGER.error("Exception while deserializing dictionary", e);
    }
    return map;
}
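
A note on the length-prefixed reads above: InputStream.read(byte[]) may legitimately return fewer bytes than requested, whereas DataInputStream.readFully either fills the whole buffer or throws EOFException. A variant of the loop body using readFully, shown as an illustrative sketch rather than the Pinot code:

int keyLength = in.readInt();
byte[] keyBytes = new byte[keyLength];
in.readFully(keyBytes);              // guaranteed to read exactly keyLength bytes
int valueLength = in.readInt();
byte[] valueBytes = new byte[valueLength];
in.readFully(valueBytes);
map.put(new String(keyBytes, UTF8), new String(valueBytes, UTF8));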

From source file:org.apache.hama.monitor.ZKCollector.java

@Override
public MetricsRecord harvest() throws Exception {
    final String path = this.reference.get().path;
    final ZooKeeper zk = this.reference.get().zk;
    LOG.debug("Searching " + path + " in zookeeper.");
    Stat stat = zk.exists(path, false);
    if (null == stat)
        return null; // no need to collect data.
    List<String> children = zk.getChildren(path, false);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Leaves size is " + children.size() + " total znodes in list: " + children);
    }

    // TODO: metrics record contains multiple metrics (1 to many)
    // data is stored under zk e.g. /path/to/metrics/jvm/...
    // within jvm folder metrics is stored in a form of name, value pair
    final MetricsRecord record = reference.get().record;
    if (null != children) {
        for (String child : children) {
            LOG.info("metrics -> " + child);
            // <metricsName_d> indicates data type is double
            String dataType = suffix(child);
            byte[] dataInBytes = zk.getData(path + "/" + child, false, stat);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Data length (in byte): " + dataInBytes.length);
            }
            DataInputStream input = null;
            try {
                String name = removeSuffix(child);
                input = new DataInputStream(new ByteArrayInputStream(dataInBytes));
                if ("d".equals(dataType)) {
                    double dv = input.readDouble();
                    LOG.info("metrics " + name + " value:" + dv);
                    record.add(new Metric<Double>(name, dv));
                } else if ("f".equals(dataType)) {
                    float fv = input.readFloat();
                    LOG.info("metrics " + name + " value:" + fv);
                    record.add(new Metric<Float>(name, fv));
                } else if ("i".equals(dataType)) {
                    int iv = input.readInt();
                    LOG.info("metrics " + name + " value:" + iv);
                    record.add(new Metric<Integer>(name, iv));
                } else if ("l".equals(dataType)) {
                    long lv = input.readLong();
                    LOG.info("metrics " + name + " value:" + lv);
                    record.add(new Metric<Long>(name, lv));
                } else if ("b".equals(dataType)) {
                    LOG.info("metrics" + name + " value:" + Arrays.toString(dataInBytes));
                    record.add(new Metric<byte[]>(name, dataInBytes));
                } else {
                    LOG.warn("Unkown data type for metrics name: " + child);
                }
            } finally {
                input.close();
            }
        }
    }
    return record;
}
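
One caveat in the finally block above: if an earlier statement throws before input is assigned (for example removeSuffix), input.close() raises a NullPointerException. An alternative sketch, not the Hama code, using try-with-resources so the stream is closed automatically:

try (DataInputStream input = new DataInputStream(new ByteArrayInputStream(dataInBytes))) {
    if ("i".equals(dataType)) {
        int iv = input.readInt();
        record.add(new Metric<Integer>(name, iv));
    }
    // ... other data types handled exactly as in the loop above
}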

From source file:com.facebook.infrastructure.db.SuperColumn.java

public IColumn deserialize(DataInputStream dis, String name, IFilter filter) throws IOException {
    if (dis.available() == 0)
        return null;

    String[] names = RowMutation.getColumnAndColumnFamily(name);
    if (names.length == 1) {
        IColumn superColumn = defreezeSuperColumn(dis);
        if (name.equals(superColumn.name())) {
            if (!superColumn.isMarkedForDelete()) {
                /* read the number of columns stored */
                int size = dis.readInt();
                /* read the size of all columns */
                dis.readInt();
                IColumn column = null;
                for (int i = 0; i < size; ++i) {
                    column = Column.serializer().deserialize(dis, filter);
                    if (column != null) {
                        superColumn.addColumn(column.name(), column);
                        column = null;
                        if (filter.isDone()) {
                            break;
                        }
                    }
                }
            }
            return superColumn;
        } else {
            /* read the number of columns stored */
            dis.readInt();
            /* read the size of all columns to skip */
            int size = dis.readInt();
            dis.skip(size);
            return null;
        }
    }

    SuperColumn superColumn = defreezeSuperColumn(dis);
    if (!superColumn.isMarkedForDelete()) {
        int size = dis.readInt();
        /* skip the size of the columns */
        dis.readInt();
        if (size > 0) {
            for (int i = 0; i < size; ++i) {
                IColumn subColumn = Column.serializer().deserialize(dis, names[1], filter);
                if (subColumn != null) {
                    superColumn.addColumn(subColumn.name(), subColumn);
                    break;
                }
            }
        }
    }
    return superColumn;
}

From source file:darks.learning.word2vec.Word2Vec.java

/**
 * Load model file
 * 
 * @param file Model file
 */
public void loadModel(File file) {
    DataInputStream dis = null;
    try {
        log.info("Reading word2vec model from " + file);
        dis = new DataInputStream(new BufferedInputStream(new FileInputStream(file)));
        int wordSize = dis.readInt();
        int featureSize = dis.readInt();
        wordNodes.clear();
        for (int i = 0; i < wordSize; i++) {
            DoubleMatrix feature = new DoubleMatrix(featureSize);
            int nameLen = dis.readInt();
            byte[] bits = new byte[nameLen];
            dis.read(bits, 0, nameLen);
            String name = new String(bits);
            double len = 0;
            for (int f = 0; f < featureSize; f++) {
                double w = dis.readDouble();
                feature.put(f, w);
                len += w * w;
            }
            len = FastMath.sqrt(len);
            //            feature.divi(len);
            wordNodes.put(name, new WordNode(name, feature));
        }
        log.info("Succeed to read word2vec model. Word dictionary size " + wordNodes.size());
    } catch (Exception e) {
        log.error(e.getMessage(), e);
    } finally {
        IOUtils.closeStream(dis);
    }
}

From source file:com.linkedin.pinot.common.utils.DataTable.java

private Map<String, Map<Integer, String>> deserializeDictionary(byte[] buffer) {
    Map<String, Map<Integer, String>> map = new HashMap<String, Map<Integer, String>>();
    try {
        final ByteArrayInputStream bais = new ByteArrayInputStream(buffer);
        final DataInputStream in = new DataInputStream(bais);
        int size = in.readInt();
        byte[] temp;
        for (int i = 0; i < size; i++) {
            int readLength = in.readInt();
            temp = new byte[readLength];
            in.read(temp);
            Map<Integer, String> childMap = new HashMap<Integer, String>();
            map.put(new String(temp, UTF8), childMap);
            int childMapSize = in.readInt();
            for (int j = 0; j < childMapSize; j++) {
                Integer key = in.readInt();
                int valueLength = in.readInt();
                temp = new byte[valueLength];
                in.read(temp);
                childMap.put(key, new String(temp, UTF8));
            }
        }
    } catch (Exception e) {
        LOGGER.error("Exception while deserializing dictionary", e);
    }
    return map;
}

From source file:org.apache.hadoop.yarn.server.resourcemanager.recovery.LeveldbRMStateStore.java

private void loadRMDTSecretManagerTokenSequenceNumber(RMState state) throws IOException {
    byte[] data = null;
    try {
        data = db.get(bytes(RM_DT_SEQUENCE_NUMBER_KEY));
    } catch (DBException e) {
        throw new IOException(e);
    }
    if (data != null) {
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
        try {
            state.rmSecretManagerState.dtSequenceNumber = in.readInt();
        } finally {
            IOUtils.cleanup(LOG, in);
        }
    }
}

From source file:be.ibridge.kettle.trans.step.sortrows.SortRows.java

private Row getBuffer() {
    int i, f;
    int smallest;
    Row r1, r2;
    Row retval;

    // Open all files at once and read one row from each file...
    if (data.files.size() > 0 && (data.dis.size() == 0 || data.fis.size() == 0)) {
        logBasic("Opening " + data.files.size() + " tmp-files...");

        try {
            for (f = 0; f < data.files.size() && !isStopped(); f++) {
                FileObject fileObject = (FileObject) data.files.get(f);
                String filename = KettleVFS.getFilename(fileObject);
                if (log.isDetailed())
                    logDetailed("Opening tmp-file: [" + filename + "]");
                InputStream fi = fileObject.getContent().getInputStream();
                DataInputStream di;
                data.fis.add(fi);
                if (meta.getCompress()) {
                    GZIPInputStream gzfi = new GZIPInputStream(new BufferedInputStream(fi));
                    di = new DataInputStream(gzfi);
                    data.gzis.add(gzfi);
                } else {
                    di = new DataInputStream(fi);
                }
                data.dis.add(di);

                // How long is the buffer?
                int buffersize = di.readInt();

                if (log.isDetailed())
                    logDetailed("[" + filename + "] expecting " + buffersize + " rows...");

                if (buffersize > 0) {
                    // Read a row from each temp-file
                    Row metadata = (Row) data.rowMeta.get(f);
                    data.rowbuffer.add(new Row(di, metadata.size(), metadata)); // new row
                }
            }
        } catch (Exception e) {
            logError("Error reading back tmp-files : " + e.toString());
            e.printStackTrace();
        }
    }

    if (data.files.size() == 0) {
        if (data.buffer.size() > 0) {
            retval = (Row) data.buffer.get(0);
            data.buffer.remove(0);
        } else {
            retval = null;
        }
    } else {
        if (data.rowbuffer.size() == 0) {
            retval = null;
        } else {
            // We now have "filenr" rows waiting: which one is the smallest?
            //
            for (i = 0; i < data.rowbuffer.size() && !isStopped(); i++) {
                Row b = (Row) data.rowbuffer.get(i);
                if (log.isRowLevel())
                    logRowlevel("--BR#" + i + ": " + b.toString());
            }
            //

            smallest = 0;
            r1 = (Row) data.rowbuffer.get(smallest);
            for (f = 1; f < data.rowbuffer.size() && !isStopped(); f++) {
                r2 = (Row) data.rowbuffer.get(f);

                if (r2 != null && r2.compare(r1, data.fieldnrs, meta.getAscending()) < 0) {
                    smallest = f;
                    r1 = (Row) data.rowbuffer.get(smallest);
                }
            }
            retval = r1;

            data.rowbuffer.remove(smallest);
            if (log.isRowLevel())
                logRowlevel("Smallest row selected on [" + smallest + "] : " + retval);

            // now get another Row for position smallest

            FileObject file = (FileObject) data.files.get(smallest);
            DataInputStream di = (DataInputStream) data.dis.get(smallest);
            InputStream fi = (InputStream) data.fis.get(smallest);
            GZIPInputStream gzfi = (meta.getCompress()) ? (GZIPInputStream) data.gzis.get(smallest) : null;

            try {
                Row metadata = (Row) data.rowMeta.get(smallest);
                data.rowbuffer.add(smallest, new Row(di, metadata.size(), metadata));
            } catch (KettleFileException fe) // empty file or EOF mostly
            {
                try {
                    di.close();
                    fi.close();
                    if (gzfi != null)
                        gzfi.close();
                    file.delete();
                } catch (IOException e) {
                    logError("Unable to close/delete file #" + smallest + " --> " + file.toString());
                    setErrors(1);
                    stopAll();
                    return null;
                }

                data.files.remove(smallest);
                data.dis.remove(smallest);
                data.fis.remove(smallest);
                if (gzfi != null)
                    data.gzis.remove(smallest);
                data.rowMeta.remove(smallest);
            }
        }
    }
    return retval;
}

From source file:org.apache.jackrabbit.core.persistence.bundle.util.BundleBinding.java

/**
 * Deserializes a <code>PropertyState</code> from the data input stream.
 *
 * @param in the input stream
 * @param id the property id for the new property entry
 * @return the property entry
 * @throws IOException if an I/O error occurs.
 */
public NodePropBundle.PropertyEntry readPropertyEntry(DataInputStream in, PropertyId id) throws IOException {
    NodePropBundle.PropertyEntry entry = new NodePropBundle.PropertyEntry(id);
    // type and modcount
    int type = in.readInt();
    entry.setModCount((short) ((type >> 16) & 0x0ffff));
    type &= 0x0ffff;
    entry.setType(type);

    // multiValued
    entry.setMultiValued(in.readBoolean());
    // definitionId
    in.readUTF();
    // values
    int count = in.readInt(); // count
    InternalValue[] values = new InternalValue[count];
    String[] blobIds = new String[count];
    for (int i = 0; i < count; i++) {
        InternalValue val;
        switch (type) {
        case PropertyType.BINARY:
            int size = in.readInt();
            if (size == BINARY_IN_DATA_STORE) {
                val = InternalValue.create(dataStore, in.readUTF());
            } else if (size == BINARY_IN_BLOB_STORE) {
                blobIds[i] = in.readUTF();
                try {
                    if (blobStore instanceof ResourceBasedBLOBStore) {
                        val = InternalValue
                                .create(((ResourceBasedBLOBStore) blobStore).getResource(blobIds[i]));
                    } else {
                        val = InternalValue.create(blobStore.get(blobIds[i]));
                    }
                } catch (IOException e) {
                    if (errorHandling.ignoreMissingBlobs()) {
                        log.warn("Ignoring error while reading blob-resource: " + e);
                        val = InternalValue.create(new byte[0]);
                    } else {
                        throw e;
                    }
                } catch (Exception e) {
                    throw new IOException("Unable to create property value: " + e.toString());
                }
            } else {
                // short values into memory
                byte[] data = new byte[size];
                in.readFully(data);
                val = InternalValue.create(data);
            }
            break;
        case PropertyType.DOUBLE:
            val = InternalValue.create(in.readDouble());
            break;
        case PropertyType.LONG:
            val = InternalValue.create(in.readLong());
            break;
        case PropertyType.BOOLEAN:
            val = InternalValue.create(in.readBoolean());
            break;
        case PropertyType.NAME:
            val = InternalValue.create(readQName(in));
            break;
        case PropertyType.REFERENCE:
            val = InternalValue.create(readUUID(in));
            break;
        default:
            // because writeUTF(String) has a size limit of 64k,
            // Strings are serialized as <length><byte[]>
            int len = in.readInt();
            byte[] bytes = new byte[len];
            in.readFully(bytes);
            val = InternalValue.valueOf(new String(bytes, "UTF-8"), type);
        }
        values[i] = val;
    }
    entry.setValues(values);
    entry.setBlobIds(blobIds);

    return entry;
}
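
The first readInt above unpacks two fields from one int: the modification count in the high 16 bits and the property type in the low 16 bits. The write side presumably packs them the same way; the writeInt line below is an assumption mirroring the read side, not quoted Jackrabbit code. A self-contained round-trip sketch:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ModCountTypePacking {
    public static void main(String[] args) throws IOException {
        short modCount = 3;
        int type = 1; // stands in for a PropertyType constant
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(baos)) {
            // Assumed write side: mod count in the high 16 bits, type in the low 16 bits.
            out.writeInt((modCount << 16) | (type & 0xffff));
        }
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
            int field = in.readInt();
            short readModCount = (short) ((field >> 16) & 0xffff);
            int readType = field & 0xffff;
            System.out.println(readModCount + " / " + readType); // 3 / 1
        }
    }
}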

From source file:org.apache.jackrabbit.core.persistence.bundle.util.BundleBinding.java

/**
 * Checks a <code>PropertyState</code> from the data input stream.
 *
 * @param in the input stream
 * @return <code>true</code> if the data is valid;
 *         <code>false</code> otherwise.
 */
public boolean checkPropertyState(DataInputStream in) {
    int type;
    try {
        type = in.readInt();
        short modCount = (short) ((type >> 16) | 0xffff);
        type &= 0xffff;
        log.debug("  PropertyType: " + PropertyType.nameFromValue(type));
        log.debug("  ModCount: " + modCount);
    } catch (IOException e) {
        log.error("Error while reading property type: " + e);
        return false;
    }
    try {
        boolean isMV = in.readBoolean();
        log.debug("  MultiValued: " + isMV);
    } catch (IOException e) {
        log.error("Error while reading multivalued: " + e);
        return false;
    }
    try {
        String definitionId = in.readUTF();
        log.debug("  DefinitionId: " + definitionId);
    } catch (IOException e) {
        log.error("Error while reading definition id: " + e);
        return false;
    }

    int count;
    try {
        count = in.readInt();
        log.debug("  num values: " + count);
    } catch (IOException e) {
        log.error("Error while reading number of values: " + e);
        return false;
    }
    for (int i = 0; i < count; i++) {
        switch (type) {
        case PropertyType.BINARY:
            int size;
            try {
                size = in.readInt();
                log.debug("  binary size: " + size);
            } catch (IOException e) {
                log.error("Error while reading size of binary: " + e);
                return false;
            }
            if (size == BINARY_IN_DATA_STORE) {
                try {
                    String s = in.readUTF();
                    // truncate log output
                    if (s.length() > 80) {
                        s = s.substring(80) + "...";
                    }
                    log.debug("  global data store id: " + s);
                } catch (IOException e) {
                    log.error("Error while reading blob id: " + e);
                    return false;
                }
            } else if (size == BINARY_IN_BLOB_STORE) {
                try {
                    String s = in.readUTF();
                    log.debug("  blobid: " + s);
                } catch (IOException e) {
                    log.error("Error while reading blob id: " + e);
                    return false;
                }
            } else {
                // short values into memory
                byte[] data = new byte[size];
                try {
                    in.readFully(data);
                    log.debug("  binary: " + data.length + " bytes");
                } catch (IOException e) {
                    log.error("Error while reading inlined binary: " + e);
                    return false;
                }
            }
            break;
        case PropertyType.DOUBLE:
            try {
                double d = in.readDouble();
                log.debug("  double: " + d);
            } catch (IOException e) {
                log.error("Error while reading double value: " + e);
                return false;
            }
            break;
        case PropertyType.DECIMAL:
            try {
                BigDecimal d = readDecimal(in);
                log.debug("  decimal: " + d);
            } catch (IOException e) {
                log.error("Error while reading decimal value: " + e);
                return false;
            }
            break;
        case PropertyType.LONG:
            try {
                double l = in.readLong();
                log.debug("  long: " + l);
            } catch (IOException e) {
                log.error("Error while reading long value: " + e);
                return false;
            }
            break;
        case PropertyType.BOOLEAN:
            try {
                boolean b = in.readBoolean();
                log.debug("  boolean: " + b);
            } catch (IOException e) {
                log.error("Error while reading boolean value: " + e);
                return false;
            }
            break;
        case PropertyType.NAME:
            try {
                Name name = readQName(in);
                log.debug("  name: " + name);
            } catch (IOException e) {
                log.error("Error while reading name value: " + e);
                return false;
            }
            break;
        case PropertyType.WEAKREFERENCE:
        case PropertyType.REFERENCE:
            try {
                NodeId id = readID(in);
                log.debug("  reference: " + id);
            } catch (IOException e) {
                log.error("Error while reading reference value: " + e);
                return false;
            }
            break;
        default:
            // because writeUTF(String) has a size limit of 64k,
            // Strings are serialized as <length><byte[]>
            int len;
            try {
                len = in.readInt();
                log.debug("  size of string value: " + len);
            } catch (IOException e) {
                log.error("Error while reading size of string value: " + e);
                return false;
            }
            try {
                byte[] bytes = new byte[len];
                in.readFully(bytes);
                String s = new String(bytes, "UTF-8");
                // truncate log output
                if (s.length() > 80) {
                    s = s.substring(80) + "...";
                }
                log.debug("  string: " + s);
            } catch (IOException e) {
                log.error("Error while reading string value: " + e);
                return false;
            }
        }
    }
    return true;
}

From source file:org.apache.jackrabbit.core.persistence.bundle.util.BundleBinding.java

/**
 * Checks a <code>PropertyState</code> from the data input stream.
 *
 * @param in the input stream
 * @return <code>true</code> if the data is valid;
 *         <code>false</code> otherwise.
 */
public boolean checkPropertyState(DataInputStream in) {
    int type;
    try {
        type = in.readInt();
        short modCount = (short) ((type >> 16) | 0xffff);
        type &= 0xffff;
        log.debug("  PropertyType: " + PropertyType.nameFromValue(type));
        log.debug("  ModCount: " + modCount);
    } catch (IOException e) {
        log.error("Error while reading property type: " + e);
        return false;
    }
    try {
        boolean isMV = in.readBoolean();
        log.debug("  MultiValued: " + isMV);
    } catch (IOException e) {
        log.error("Error while reading multivalued: " + e);
        return false;
    }
    try {
        String definitionId = in.readUTF();
        log.debug("  DefinitionId: " + definitionId);
    } catch (IOException e) {
        log.error("Error while reading definition id: " + e);
        return false;
    }

    int count;
    try {
        count = in.readInt();
        log.debug("  num values: " + count);
    } catch (IOException e) {
        log.error("Error while reading number of values: " + e);
        return false;
    }
    for (int i = 0; i < count; i++) {
        switch (type) {
        case PropertyType.BINARY:
            int size;
            try {
                size = in.readInt();
                log.debug("  binary size: " + size);
            } catch (IOException e) {
                log.error("Error while reading size of binary: " + e);
                return false;
            }
            if (size == BINARY_IN_DATA_STORE) {
                try {
                    String s = in.readUTF();
                    // truncate log output
                    if (s.length() > 80) {
                        s = s.substring(80) + "...";
                    }
                    log.debug("  global data store id: " + s);
                } catch (IOException e) {
                    log.error("Error while reading blob id: " + e);
                    return false;
                }
            } else if (size == BINARY_IN_BLOB_STORE) {
                try {
                    String s = in.readUTF();
                    log.debug("  blobid: " + s);
                } catch (IOException e) {
                    log.error("Error while reading blob id: " + e);
                    return false;
                }
            } else {
                // short values into memory
                byte[] data = new byte[size];
                try {
                    in.readFully(data);
                    log.debug("  binary: " + data.length + " bytes");
                } catch (IOException e) {
                    log.error("Error while reading inlined binary: " + e);
                    return false;
                }
            }
            break;
        case PropertyType.DOUBLE:
            try {
                double d = in.readDouble();
                log.debug("  double: " + d);
            } catch (IOException e) {
                log.error("Error while reading double value: " + e);
                return false;
            }
            break;
        case PropertyType.DECIMAL:
            try {
                BigDecimal d = readDecimal(in);
                log.debug("  decimal: " + d);
            } catch (IOException e) {
                log.error("Error while reading decimal value: " + e);
                return false;
            }
            break;
        case PropertyType.LONG:
            try {
                double l = in.readLong();
                log.debug("  long: " + l);
            } catch (IOException e) {
                log.error("Error while reading long value: " + e);
                return false;
            }
            break;
        case PropertyType.BOOLEAN:
            try {
                boolean b = in.readBoolean();
                log.debug("  boolean: " + b);
            } catch (IOException e) {
                log.error("Error while reading boolean value: " + e);
                return false;
            }
            break;
        case PropertyType.NAME:
            try {
                Name name = readQName(in);
                log.debug("  name: " + name);
            } catch (IOException e) {
                log.error("Error while reading name value: " + e);
                return false;
            }
            break;
        case PropertyType.WEAKREFERENCE:
        case PropertyType.REFERENCE:
            try {
                UUID uuid = readUUID(in);
                log.debug("  reference: " + uuid);
            } catch (IOException e) {
                log.error("Error while reading reference value: " + e);
                return false;
            }
            break;
        default:
            // because writeUTF(String) has a size limit of 64k,
            // Strings are serialized as <length><byte[]>
            int len;
            try {
                len = in.readInt();
                log.debug("  size of string value: " + len);
            } catch (IOException e) {
                log.error("Error while reading size of string value: " + e);
                return false;
            }
            try {
                byte[] bytes = new byte[len];
                in.readFully(bytes);
                String s = new String(bytes, "UTF-8");
                // truncate log output
                if (s.length() > 80) {
                    s = s.substring(80) + "...";
                }
                log.debug("  string: " + s);
            } catch (IOException e) {
                log.error("Error while reading string value: " + e);
                return false;
            }
        }
    }
    return true;
}