Example usage for java.io DataInputStream readInt

List of usage examples for java.io DataInputStream readInt

Introduction

On this page you can find example usages of java.io DataInputStream readInt.

Prototype

public final int readInt() throws IOException 

Source Link

Document

See the general contract of the readInt method of DataInput.

Usage

From source file:com.koushikdutta.superuser.MultitaskSuRequestActivity.java

/**
 * Connects to the su daemon over a local socket on a background thread and
 * reads the request payload: up to SU_PROTOCOL_PARAM_MAX length-prefixed
 * (name, value) string pairs, terminated by an entry named "eof". The parsed
 * fields are stored and the UI thread is notified via requestReady(); any
 * failure closes the socket and finishes the activity.
 */
void manageSocket() {
    new Thread() {
        @Override
        public void run() {
            try {
                mSocket = new LocalSocket();
                mSocket.connect(new LocalSocketAddress(mSocketPath, Namespace.FILESYSTEM));

                DataInputStream is = new DataInputStream(mSocket.getInputStream());

                ContentValues payload = new ContentValues();

                for (int i = 0; i < SU_PROTOCOL_PARAM_MAX; i++) {
                    int nameLen = is.readInt();
                    // Validate before allocating: a corrupt or malicious peer could
                    // send a negative length (NegativeArraySizeException) or a huge one.
                    if (nameLen < 0)
                        throw new IllegalArgumentException("name length negative: " + nameLen);
                    if (nameLen > SU_PROTOCOL_NAME_MAX)
                        throw new IllegalArgumentException("name length too long: " + nameLen);
                    byte[] nameBytes = new byte[nameLen];
                    is.readFully(nameBytes);
                    // NOTE(review): decodes with the platform default charset; the su
                    // protocol presumably sends ASCII — confirm before changing.
                    String name = new String(nameBytes);
                    int dataLen = is.readInt();
                    if (dataLen < 0)
                        throw new IllegalArgumentException(name + " data length negative: " + dataLen);
                    if (dataLen > getValueMax(name))
                        throw new IllegalArgumentException(name + " data length too long: " + dataLen);
                    byte[] dataBytes = new byte[dataLen];
                    is.readFully(dataBytes);
                    String data = new String(dataBytes);
                    payload.put(name, data);
                    // The daemon marks the end of the payload with an "eof" entry.
                    if ("eof".equals(name))
                        break;
                }

                int protocolVersion = payload.getAsInteger("version");
                mCallerUid = payload.getAsInteger("from.uid");
                // NOTE(review): to.uid is read as a byte while from.uid is an int —
                // verify against the daemon's wire format.
                mDesiredUid = payload.getAsByte("to.uid");
                mDesiredCmd = payload.getAsString("command");
                String calledBin = payload.getAsString("from.bin");
                mPid = payload.getAsInteger("pid");
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        mRequestReady = true;
                        requestReady();
                    }
                });

                if ("com.koushikdutta.superuser".equals(getPackageName())) {
                    if (!SuHelper.CURRENT_VERSION.equals(payload.getAsString("binary.version")))
                        SuCheckerReceiver.doNotification(MultitaskSuRequestActivity.this);
                }
            } catch (Exception ex) {
                Log.i(LOGTAG, ex.getMessage(), ex);
                try {
                    mSocket.close();
                } catch (Exception ignored) {
                    // Best-effort close; the socket may already be dead.
                }
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        finish();
                    }
                });
            }
        }
    }.start();
}

From source file:org.sakaiproject.nakamura.auth.trusted.TokenStore.java

/**
 * Loads the secret-key ring buffer from the local token file. On-disk layout:
 * current key id (int), next update time (long), then 5 slots, each a presence
 * marker (int, 1 = present) followed by expiry (long), server id (UTF) and the
 * raw key bytes (int length + bytes). The in-memory state is replaced only
 * after the whole file has been read successfully; on failure (or first run)
 * an empty ring buffer is installed instead.
 */
private void loadLocalSecretKeys() {
    FileInputStream fin = null;
    DataInputStream keyInputStream = null;
    try {
        fin = new FileInputStream(tokenFile);
        keyInputStream = new DataInputStream(fin);
        int newCurrentToken = keyInputStream.readInt();
        long newNextUpdate = keyInputStream.readLong();
        ExpiringSecretKey[] newKeys = new ExpiringSecretKey[5];
        for (int i = 0; i < newKeys.length; i++) {
            // Slot marker: 1 means a key record follows, anything else is empty.
            int isNull = keyInputStream.readInt();
            if (isNull == 1) {
                long expires = keyInputStream.readLong();
                String keyServerId = keyInputStream.readUTF();
                int l = keyInputStream.readInt();
                byte[] b = new byte[l];
                // readFully blocks until all bytes are read and throws EOFException
                // (an IOException) on truncation; a single read() may legally return
                // fewer bytes even when the file is intact.
                keyInputStream.readFully(b);
                newKeys[i] = new ExpiringSecretKey(b, HMAC_SHA1, expires, keyServerId);
                getServerKeyCache().put(getCacheKey(keyServerId, i), newKeys[i].getSecretKeyData());
                LOG.info("Loaded Key {} from Local Store into {} ", getCacheKey(keyServerId, i),
                        getServerKeyCache());
            } else {
                newKeys[i] = null;
            }
        }
        // Commit the fully-read state only now, so a partial read never leaves
        // a half-populated key ring behind.
        nextUpdate = newNextUpdate;
        secretKeyId = newCurrentToken;
        secretKeyRingBuffer = newKeys;

    } catch (IOException e) {
        LOG.error("Failed to load cookie keys " + e.getMessage());
    } finally {
        // Guard against NPE when the file could not even be opened.
        if (keyInputStream != null) {
            try {
                keyInputStream.close();
            } catch (Exception ignored) {
            }
        }
        if (fin != null) {
            try {
                fin.close();
            } catch (Exception ignored) {
            }
        }
    }
    if (secretKeyRingBuffer == null) {
        secretKeyRingBuffer = new ExpiringSecretKey[5];
        nextUpdate = System.currentTimeMillis();
        secretKeyId = 0;
    }
    if (debugCookies) {
        dumpSecretKeyRingBuffer(secretKeyRingBuffer);
    }
}

From source file:org.apache.hadoop.hbase.io.hfile.FixedFileTrailer.java

/**
 * Deserialize the fixed file trailer from the given stream. The version needs
 * to already be specified. Make sure this is consistent with
 * {@link #serialize(DataOutputStream)}.
 *
 * @param inputStream stream positioned at the start of the trailer block
 * @throws IOException if the trailer is truncated or malformed
 */
void deserialize(DataInputStream inputStream) throws IOException {
    // majorVersion must already be set; reject unsupported format versions.
    HFile.checkFormatVersion(majorVersion);

    // The trailer starts with the TRAILER block-type magic bytes.
    BlockType.TRAILER.readAndCheck(inputStream);

    fileInfoOffset = inputStream.readLong();
    loadOnOpenDataOffset = inputStream.readLong();
    dataIndexCount = inputStream.readInt();

    if (majorVersion == 1) {
        inputStream.readLong(); // Read and skip metaIndexOffset.
    } else {
        uncompressedDataIndexSize = inputStream.readLong();
    }
    metaIndexCount = inputStream.readInt();

    totalUncompressedBytes = inputStream.readLong();
    // Version 1 stored the entry count as an int; later versions widened it to a long.
    entryCount = majorVersion == 1 ? inputStream.readInt() : inputStream.readLong();
    // NOTE(review): trusts the stored ordinal to be in range for the Algorithm
    // enum — a corrupt trailer would throw ArrayIndexOutOfBoundsException here.
    compressionCodec = Compression.Algorithm.values()[inputStream.readInt()];
    if (majorVersion > 1) {
        numDataIndexLevels = inputStream.readInt();
        firstDataBlockOffset = inputStream.readLong();
        lastDataBlockOffset = inputStream.readLong();
        comparatorClassName = Bytes.readStringFixedSize(inputStream, MAX_COMPARATOR_NAME_LENGTH);
    }

    // The trailer ends with a packed major/minor version int; verify it matches.
    int version = inputStream.readInt();
    expectMajorVersion(extractMajorVersion(version));
    expectMinorVersion(extractMinorVersion(version));
}

From source file:org.chromium.chrome.browser.tabmodel.TabPersistentStore.java

/**
 * Extracts the tab information from a given tab state stream.
 *
 * @param stream   The stream pointing to the tab state file to be parsed.
 * @param callback A callback to be streamed updates about the tab state information being read.
 * @param tabIds   A mapping of tab ID to whether the tab is an off the record tab.
 * @param forMerge Whether this state file was read as part of a merge.
 * @return The next available tab ID based on the maximum ID referenced in this state file.
 * @throws IOException if the header is inconsistent or the stream is truncated.
 */
public static int readSavedStateFile(DataInputStream stream, @Nullable OnTabStateReadCallback callback,
        @Nullable SparseBooleanArray tabIds, boolean forMerge) throws IOException {
    if (stream == null)
        return 0;
    long time = SystemClock.uptimeMillis();
    int nextId = 0;
    boolean skipUrlRead = false;
    boolean skipIncognitoCount = false;
    final int version = stream.readInt();
    if (version != SAVED_STATE_VERSION) {
        // We don't support restoring Tab data from before M18.
        if (version < 3)
            return 0;
        // Older versions are missing newer data.
        if (version < 5)
            skipIncognitoCount = true;
        if (version < 4)
            skipUrlRead = true;
    }

    final int count = stream.readInt();
    // -1 flags "unknown" so isIncognito below resolves to null for old versions.
    final int incognitoCount = skipIncognitoCount ? -1 : stream.readInt();
    final int incognitoActiveIndex = stream.readInt();
    final int standardActiveIndex = stream.readInt();
    // Sanity-check the header before trusting the per-tab records that follow.
    if (count < 0 || incognitoActiveIndex >= count || standardActiveIndex >= count) {
        throw new IOException();
    }

    for (int i = 0; i < count; i++) {
        int id = stream.readInt();
        String tabUrl = skipUrlRead ? "" : stream.readUTF();
        // Track the maximum ID seen so the caller can allocate fresh IDs.
        if (id >= nextId)
            nextId = id + 1;
        if (tabIds != null)
            tabIds.append(id, true);

        // Tabs with index below incognitoCount are treated as incognito —
        // presumably incognito tabs are serialized first; TODO confirm.
        Boolean isIncognito = (incognitoCount < 0) ? null : i < incognitoCount;
        if (callback != null) {
            callback.onDetailsRead(i, id, tabUrl, isIncognito, i == standardActiveIndex,
                    i == incognitoActiveIndex);
        }
    }

    if (forMerge) {
        logExecutionTime("ReadMergedStateTime", time);
        int tabCount = count + ((incognitoCount > 0) ? incognitoCount : 0);
        RecordHistogram.recordLinearCountHistogram("Android.TabPersistentStore.MergeStateTabCount", tabCount, 1,
                200, 200);
    }

    logExecutionTime("ReadSavedStateTime", time);

    return nextId;
}

From source file:org.mrgeo.vector.mrsvector.VectorTile.java

/**
 * Reads one length-prefixed blob header record from the stream.
 *
 * @param dis stream positioned at a 4-byte big-endian header length
 * @return the parsed {@code BlobHeader}
 * @throws IOException if the stream fails or ends before the full header is read
 */
protected static BlobHeader parseHeader(final DataInputStream dis) throws IOException {
    final int len = dis.readInt();
    final byte[] blobHeader = new byte[len];
    // readFully guarantees the whole header is read; a single read() call may
    // legally return fewer than len bytes, handing the parser a truncated buffer.
    dis.readFully(blobHeader);

    return BlobHeader.parseFrom(blobHeader);
}

From source file:org.apache.cassandra.db.SuperColumn.java

/**
 * Deserializes a super column from the stream, applying the given filter.
 * Returns null when the stream is exhausted or the filter rejects the column;
 * in the rejection case the column's serialized bytes are skipped so the
 * stream stays positioned at the next column.
 *
 * @throws IOException on stream failure
 */
public IColumn deserialize(DataInputStream dis, IFilter filter) throws IOException {
    if (dis.available() == 0)
        return null;

    IColumn superColumn = defreezeSuperColumn(dis);
    superColumn = filter.filter(superColumn, dis);
    if (superColumn != null) {
        fillSuperColumn(superColumn, dis);
        return superColumn;
    } else {
        /* read the number of columns stored */
        dis.readInt();
        /* read the size of all columns to skip */
        int size = dis.readInt();
        // skipBytes retries until 'size' bytes are consumed (or EOF);
        // InputStream.skip may skip fewer bytes in a single call, which
        // would silently desynchronize the stream.
        dis.skipBytes(size);
        return null;
    }
}

From source file:org.prorefactor.refactor.PUB.java

/**
 * Reads the file index table from the stream: repeated (int index, UTF
 * filename) records, terminated by a record whose index is -1.
 */
private void readFileIndex(DataInputStream in) throws IOException {
    while (true) {
        final int recordIndex = in.readInt();
        final String recordName = in.readUTF();
        if (recordIndex == -1) {
            break;
        }
        fileList.add(recordName);
        // NOTE(review): the filename (not the index) is added to fileIndexes —
        // confirm this is intended.
        fileIndexes.add(recordName);
    }
}

From source file:com.panet.imeta.trans.steps.blockingstep.BlockingStep.java

/**
 * Returns the next buffered row, or null when no more rows are available.
 * Rows come either from the in-memory buffer (when no temp files exist) or
 * from temp files on disk, which are opened lazily on the first call; each
 * open file keeps exactly one read-ahead row in data.rowbuffer.
 */
private Object[] getBuffer() {
    Object[] retval;

    // Open all files at once and read one row from each file...
    if (data.files.size() > 0 && (data.dis.size() == 0 || data.fis.size() == 0)) {
        if (log.isBasic())
            logBasic(Messages.getString("BlockingStep.Log.Openfiles"));

        try {
            FileObject fileObject = (FileObject) data.files.get(0);
            String filename = KettleVFS.getFilename(fileObject);
            if (log.isDetailed())
                logDetailed(Messages.getString("BlockingStep.Log.Openfilename1") + filename
                        + Messages.getString("BlockingStep.Log.Openfilename2"));
            InputStream fi = KettleVFS.getInputStream(fileObject);
            DataInputStream di;
            data.fis.add(fi);
            // Temp files may be gzip-compressed depending on step configuration.
            if (meta.getCompress()) {
                GZIPInputStream gzfi = new GZIPInputStream(new BufferedInputStream(fi));
                di = new DataInputStream(gzfi);
                data.gzis.add(gzfi);
            } else {
                di = new DataInputStream(fi);
            }
            data.dis.add(di);

            // How long is the buffer?
            int buffersize = di.readInt();

            if (log.isDetailed())
                logDetailed(Messages.getString("BlockingStep.Log.BufferSize1") + filename
                        + Messages.getString("BlockingStep.Log.BufferSize2") + buffersize + " "
                        + Messages.getString("BlockingStep.Log.BufferSize3"));

            if (buffersize > 0) {
                // Read a row from temp-file
                data.rowbuffer.add(data.outputRowMeta.readData(di));
            }
        } catch (Exception e) {
            // Errors are logged but not rethrown; the caller sees them via getErrors().
            logError(Messages.getString("BlockingStepMeta.ErrorReadingFile") + e.toString());
            logError(Const.getStackTracker(e));
        }
    }

    if (data.files.size() == 0) {
        // No temp files: serve rows straight from the in-memory buffer.
        if (data.buffer.size() > 0) {
            retval = (Object[]) data.buffer.get(0);
            data.buffer.remove(0);
        } else {
            retval = null;
        }
    } else {
        if (data.rowbuffer.size() == 0) {
            retval = null;
        } else {
            // Hand out the read-ahead row, then refill it from the file.
            retval = (Object[]) data.rowbuffer.get(0);

            data.rowbuffer.remove(0);

            // now get another 
            FileObject file = (FileObject) data.files.get(0);
            DataInputStream di = (DataInputStream) data.dis.get(0);
            InputStream fi = (InputStream) data.fis.get(0);
            GZIPInputStream gzfi = (meta.getCompress()) ? (GZIPInputStream) data.gzis.get(0) : null;

            try {
                data.rowbuffer.add(0, data.outputRowMeta.readData(di));
            } catch (SocketTimeoutException e) {
                logError(Messages.getString("System.Log.UnexpectedError") + " : " + e.toString()); //$NON-NLS-1$ //$NON-NLS-2$
                logError(Const.getStackTracker(e));
                setErrors(1);
                stopAll();
            } catch (KettleFileException fe) // empty file or EOF mostly
            {
                // End of this temp file: close and delete it, then drop its
                // bookkeeping entries so the next call moves to the next file.
                try {
                    di.close();
                    fi.close();
                    if (gzfi != null)
                        gzfi.close();
                    file.delete();
                } catch (IOException e) {
                    logError(Messages.getString("BlockingStepMeta.UnableDeleteFile") + file.toString());
                    setErrors(1);
                    stopAll();
                    return null;
                }

                data.files.remove(0);
                data.dis.remove(0);
                data.fis.remove(0);
                if (gzfi != null)
                    data.gzis.remove(0);
            }
        }
    }
    return retval;
}

From source file:uk.ac.ox.webauth.Token.java

/**
 * Initialise a token from an encrypted Webauth token.
 *
 * @param   tokenData   The data to be decrypted.
 * @param   sessionKey  The session key to use for the AES and Hmac.
 * @throws  GeneralSecurityException    if there was a problem with the security code used,
 *          the nonce/checksum could not be read, or the HMAC check failed.
 */
public Token(byte[] tokenData, Key sessionKey) throws GeneralSecurityException {
    // a token is:
    // {key-hint}{nonce   }{hmac    }{token-attributes     }{padding         }
    // {4 bytes }{16 bytes}{20 bytes}{make the data into multiple of 16 bytes}
    // everything after the key hint is aes encrypted

    try {
        // set up some streams
        ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(tokenData);
        DataInputStream dataInputStream = new DataInputStream(byteArrayInputStream);

        // read the key hint (consumed to advance the stream; not otherwise used here)
        int keyHint = dataInputStream.readInt();

        // prepare to AES decrypt the rest
        Cipher cipher = Cipher.getInstance("AES/CBC/NoPadding");
        cipher.init(DECRYPT_MODE, sessionKey, IV);
        CipherInputStream decrypt = new CipherInputStream(byteArrayInputStream, cipher);

        // throw away the 16-byte nonce
        if (decrypt.read(new byte[16]) != 16) {
            throw new GeneralSecurityException("Failed to read nonce from token.");
        }

        // read the HMACSHA1 checksum
        byte[] checksum = new byte[20];
        if (decrypt.read(checksum) != 20) {
            throw new GeneralSecurityException("Failed to read HMAC SHA1 checksum from token.");
        }

        // read in the rest of the data, byte by byte, until the stream is drained
        ByteArrayOutputStream tokenByteArrayOutputStream = new ByteArrayOutputStream();
        for (int b = decrypt.read(); b != -1; b = decrypt.read()) {
            tokenByteArrayOutputStream.write(b);
        }
        byte[] data = tokenByteArrayOutputStream.toByteArray();
        decrypt.close();

        // verify the HMAC-SHA1 of the attribute data against the stored checksum
        Mac hmacSHA1 = Mac.getInstance("HmacSHA1");
        hmacSHA1.init(sessionKey);
        if (!Arrays.equals(checksum, hmacSHA1.doFinal(data))) {
            throw new GeneralSecurityException("Invalid token, checksum mismatch.");
        }

        // create all the key-value pairs; attributes are semicolon-terminated,
        // with ";;" acting as an escaped semicolon inside a value
        for (int i = 0, start = 0; (i = indexOf(SEMI_COLON, data, i)) != -1;) {
            i++;
            if (i < data.length && data[i] == SEMI_COLON) {
                i++;
                continue;
            }
            byte[] keyValuePairArray = new byte[i - start];
            System.arraycopy(data, start, keyValuePairArray, 0, keyValuePairArray.length);
            KeyValuePair kvp = new KeyValuePair(keyValuePairArray);
            kv.put(new String(kvp.key(), "US-ASCII"), kvp);
            start = i;
        }
    } catch (IOException ioe) {
        /* should never happen as it's a ByteArrayInputStream */
        ioe.printStackTrace();
    }
    // NOTE(review): valid is set true even if the IOException path above was
    // taken (the token would then have no attributes) — confirm this is intended.
    valid = true;

    // create the Stringifier to use
    stringifier = new WebauthTokenStringifier();
}

From source file:org.apache.cassandra.db.SuperColumn.java

/**
 * Deserializes a super column addressed by {@code name}, applying the given
 * filter. When {@code name} addresses only a column family, the whole super
 * column is returned if its name matches (otherwise its bytes are skipped and
 * null is returned). When {@code name} also addresses a sub-column, only that
 * sub-column is deserialized into the returned super column.
 *
 * @throws IOException on stream failure
 */
public IColumn deserialize(DataInputStream dis, String name, IFilter filter) throws IOException {
    if (dis.available() == 0)
        return null;

    String[] names = RowMutation.getColumnAndColumnFamily(name);
    if (names.length == 1) {
        IColumn superColumn = defreezeSuperColumn(dis);
        if (name.equals(superColumn.name())) {
            /* read the number of columns stored */
            int size = dis.readInt();
            /* read the size of all columns */
            dis.readInt();
            IColumn column = null;
            for (int i = 0; i < size; ++i) {
                column = Column.serializer().deserialize(dis, filter);
                if (column != null) {
                    superColumn.addColumn(column.name(), column);
                    column = null;
                    if (filter.isDone()) {
                        break;
                    }
                }
            }
            return superColumn;
        } else {
            /* read the number of columns stored */
            dis.readInt();
            /* read the size of all columns to skip */
            int size = dis.readInt();
            // skipBytes retries until 'size' bytes are consumed (or EOF);
            // InputStream.skip may skip fewer bytes in a single call, which
            // would silently desynchronize the stream.
            dis.skipBytes(size);
            return null;
        }
    }

    SuperColumn superColumn = defreezeSuperColumn(dis);
    if (!superColumn.isMarkedForDelete()) {
        int size = dis.readInt();
        /* skip the size of the columns */
        dis.readInt();
        if (size > 0) {
            for (int i = 0; i < size; ++i) {
                IColumn subColumn = Column.serializer().deserialize(dis, names[1], filter);
                if (subColumn != null) {
                    superColumn.addColumn(subColumn.name(), subColumn);
                    break;
                }
            }
        }
    }
    return superColumn;
}