List of usage examples for java.io.DataInputStream.readInt.
Signature: public final int readInt() throws IOException
readInt is a method declared by the java.io.DataInput interface and implemented by DataInputStream.
. From source file:org.carbondata.processing.globalsurrogategenerator.LevelGlobalSurrogateGeneratorThread.java
public static Map<String, Integer> readLevelFileAndUpdateCache(CarbonFile memberFile) throws IOException { DataInputStream inputStream = null; Map<String, Integer> localMemberMap = new HashMap<String, Integer>( CarbonCommonConstants.DEFAULT_COLLECTION_SIZE); try {// w ww . ja v a 2 s. com inputStream = FileFactory.getDataInputStream(memberFile.getPath(), FileFactory.getFileType(memberFile.getPath())); long currentPosition = 4; long size = memberFile.getSize() - 4; boolean enableEncoding = Boolean.valueOf( CarbonProperties.getInstance().getProperty(CarbonCommonConstants.ENABLE_BASE64_ENCODING, CarbonCommonConstants.ENABLE_BASE64_ENCODING_DEFAULT)); int surrogateValue = inputStream.readInt(); while (currentPosition < size) { int len = inputStream.readInt(); currentPosition += 4; byte[] rowBytes = new byte[len]; inputStream.readFully(rowBytes); currentPosition += len; String decodedValue = null; if (enableEncoding) { decodedValue = new String(Base64.decodeBase64(rowBytes), Charset.defaultCharset()); } else { decodedValue = new String(rowBytes, Charset.defaultCharset()); } localMemberMap.put(decodedValue, surrogateValue); surrogateValue++; } } catch (Exception e) { LOGGER.error(e, e.getMessage()); CarbonUtil.closeStreams(inputStream); } finally { CarbonUtil.closeStreams(inputStream); } return localMemberMap; }
From source file:com.vimukti.accounter.license.LicensePair.java
/**
 * Decodes a Base64 license string into its length-prefixed license text and
 * trailing hash. If the text cannot be decoded at all, the constructor returns
 * early leaving only the original string set.
 *
 * @param contactLicenseText Base64-encoded license blob
 * @throws LicenseException if the decoded bytes cannot be read
 */
public LicensePair(String contactLicenseText) throws LicenseException {
    this.originalLicenseString = contactLicenseText;
    if (!new LicenseManager().canDecode(contactLicenseText)) {
        return;
    }
    try {
        byte[] decodedBytes = Base64.decodeBase64(contactLicenseText.getBytes());
        ByteArrayInputStream in = new ByteArrayInputStream(decodedBytes);
        DataInputStream dIn = new DataInputStream(in);
        int textLength = dIn.readInt();
        this.licenseText = new byte[textLength];
        // FIX: read() may return fewer bytes than requested; readFully guarantees
        // the full license text (or throws EOFException on truncated input).
        dIn.readFully(licenseText);
        // Everything after the license text is the hash.
        this.hash = new byte[dIn.available()];
        dIn.readFully(hash);
    } catch (IOException e) {
        throw new LicenseException(e);
    }
}
From source file:additionalpipes.inventory.components.PropertyIntArray.java
@Override public void readData(DataInputStream data) throws IOException { int size = data.readInt(); if (size > 0) { value = new int[size]; for (int i = 0; i < value.length; i++) { value[i] = data.readInt();//from www. j a va 2 s .co m } } else { value = ArrayUtils.EMPTY_INT_ARRAY; } }
From source file:org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferEncryptor.java
/**
 * Factory method for DNs, where the nonce, keyId, and encryption key are not
 * yet known. The nonce and keyId will be sent by the client, and the DN
 * will then use those pieces of info and the secret key shared with the NN
 * to determine the encryptionKey used for the SASL handshake/encryption.
 *
 * Establishes a secure connection assuming that the party on the other end
 * has the same shared secret. This does a SASL connection handshake, but not
 * a general-purpose one. It's specific to the MD5-DIGEST SASL mechanism with
 * auth-conf enabled. In particular, it doesn't support an arbitrary number of
 * challenge/response rounds, and we know that the client will never have an
 * initial response, so we don't check for one.
 *
 * @param underlyingOut output stream to write to the other party
 * @param underlyingIn input stream to read from the other party
 * @param blockPoolTokenSecretManager secret manager capable of constructing
 *        encryption key based on keyId, blockPoolId, and nonce
 * @param encryptionAlgorithm name of the SASL cipher to negotiate
 * @return a pair of streams which wrap the given streams and encrypt/decrypt
 *         all data read/written
 * @throws IOException in the event of error
 */
public static IOStreamPair getEncryptedStreams(OutputStream underlyingOut, InputStream underlyingIn,
        BlockPoolTokenSecretManager blockPoolTokenSecretManager, String encryptionAlgorithm) throws IOException {
    DataInputStream in = new DataInputStream(underlyingIn);
    DataOutputStream out = new DataOutputStream(underlyingOut);
    // Configure SASL to use the cipher requested for this transfer.
    Map<String, String> saslProps = Maps.newHashMap(SASL_PROPS);
    saslProps.put("com.sun.security.sasl.digest.cipher", encryptionAlgorithm);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Server using encryption algorithm " + encryptionAlgorithm);
    }
    // Server-side SASL participant; the callback handler constructs the key
    // from the client-supplied info via the secret manager.
    SaslParticipant sasl = new SaslParticipant(Sasl.createSaslServer(MECHANISM, PROTOCOL, SERVER_NAME,
            saslProps, new SaslServerCallbackHandler(blockPoolTokenSecretManager)));
    // The client opens an encrypted transfer with a fixed magic number.
    int magicNumber = in.readInt();
    if (magicNumber != ENCRYPTED_TRANSFER_MAGIC_NUMBER) {
        throw new InvalidMagicNumberException(magicNumber);
    }
    try {
        // step 1
        performSaslStep1(out, in, sasl);
        // step 2 (server-side only)
        byte[] remoteResponse = readSaslMessage(in);
        byte[] localResponse = sasl.evaluateChallengeOrResponse(remoteResponse);
        sendSaslMessage(out, localResponse);
        // SASL handshake is complete
        checkSaslComplete(sasl);
        return sasl.createEncryptedStreamPair(out, in);
    } catch (IOException ioe) {
        if (ioe instanceof SaslException && ioe.getCause() != null
                && ioe.getCause() instanceof InvalidEncryptionKeyException) {
            // This could just be because the client is long-lived and hasn't gotten
            // a new encryption key from the NN in a while. Upon receiving this
            // error, the client will get a new encryption key from the NN and retry
            // connecting to this DN.
            sendInvalidKeySaslErrorMessage(out, ioe.getCause().getMessage());
        } else {
            sendGenericSaslErrorMessage(out, ioe.getMessage());
        }
        throw ioe;
    }
}
From source file:org.spout.engine.filesystem.WorldFiles.java
public static void readColumn(InputStream in, SpoutColumn column, AtomicInteger lowestY, BlockMaterial[][] topmostBlocks) { if (in == null) { //The inputstream is null because no height map data exists for (int x = 0; x < SpoutColumn.BLOCKS.SIZE; x++) { for (int z = 0; z < SpoutColumn.BLOCKS.SIZE; z++) { column.getAtomicInteger(x, z).set(Integer.MIN_VALUE); topmostBlocks[x][z] = null; column.setDirty(x, z);//from ww w . ja va 2s . c o m } } lowestY.set(Integer.MAX_VALUE); return; } DataInputStream dataStream = new DataInputStream(in); try { for (int x = 0; x < SpoutColumn.BLOCKS.SIZE; x++) { for (int z = 0; z < SpoutColumn.BLOCKS.SIZE; z++) { column.getAtomicInteger(x, z).set(dataStream.readInt()); } } @SuppressWarnings("unused") int version = dataStream.readInt(); lowestY.set(dataStream.readInt()); //Save heightmap StringMap global = ((SpoutEngine) Spout.getEngine()).getEngineItemMap(); StringMap itemMap = column.getWorld().getItemMap(); boolean warning = false; for (int x = 0; x < SpoutColumn.BLOCKS.SIZE; x++) { for (int z = 0; z < SpoutColumn.BLOCKS.SIZE; z++) { if (!dataStream.readBoolean()) { continue; } int blockState = dataStream.readInt(); short blockId = BlockFullState.getId(blockState); short blockData = BlockFullState.getData(blockState); blockId = (short) itemMap.convertTo(global, blockId); blockState = BlockFullState.getPacked(blockId, blockData); BlockMaterial m; try { m = (BlockMaterial) MaterialRegistry.get(blockState); } catch (ClassCastException e) { m = null; if (!warning) { Spout.getLogger().severe( "Error reading column topmost block information, block was not a valid BlockMaterial"); warning = false; } } if (m == null) { column.setDirty(x, z); } topmostBlocks[x][z] = m; } } //Save Biomes BiomeManager manager = null; try { //Biome manager is serialized with: // - boolean, if a biome manager exists // - String, the class name // - int, the number of bytes of data to read // - byte[], size of the above int in length boolean exists = 
dataStream.readBoolean(); if (exists) { String biomeManagerClass = dataStream.readUTF(); int biomeSize = dataStream.readInt(); byte[] biomes = new byte[biomeSize]; dataStream.readFully(biomes); //Attempt to create the biome manager class from the class name @SuppressWarnings("unchecked") Class<? extends BiomeManager> clazz = (Class<? extends BiomeManager>) Class .forName(biomeManagerClass); Class<?>[] params = { int.class, int.class }; manager = clazz.getConstructor(params).newInstance(column.getX(), column.getZ()); manager.deserialize(biomes); column.setBiomeManager(manager); } } catch (Exception e) { Spout.getLogger().log(Level.SEVERE, "Failed to read biome data for column", e); } } catch (IOException e) { Spout.getLogger() .severe("Error reading column height-map for column" + column.getX() + ", " + column.getZ()); } }
From source file:org.structr.core.graph.SyncCommand.java
public static byte[] deserializeData(final DataInputStream inputStream) throws IOException { final int len = inputStream.readInt(); final byte[] buffer = new byte[len]; inputStream.read(buffer, 0, len);//www . ja va 2 s . c o m return buffer; }
From source file:com.facebook.infrastructure.db.CommitLogEntry.java
/** Reconstructs a CommitLogEntry from its length-prefixed serialized form. */
public CommitLogEntry deserialize(DataInputStream dis) throws IOException {
    // Wire format: an int payload length followed by the payload bytes.
    final int length = dis.readInt();
    final byte[] payload = new byte[length];
    dis.readFully(payload);
    return new CommitLogEntry(payload);
}
From source file:com.bigdata.dastor.db.ReadResponse.java
/**
 * Reconstructs a ReadResponse: a length-prefixed digest, a flag indicating
 * whether this is a digest-only response, and (when not digest-only) a
 * serialized Row.
 *
 * @param dis stream positioned at the digest length
 * @return the deserialized ReadResponse with its digest-query flag set
 * @throws IOException on read failure, including EOFException on truncated input
 */
public ReadResponse deserialize(DataInputStream dis) throws IOException {
    int digestSize = dis.readInt();
    byte[] digest = new byte[digestSize];
    // FIX: read() may return fewer bytes than requested, leaving a partially
    // filled digest; readFully reads the whole digest or throws.
    dis.readFully(digest, 0, digestSize);
    boolean isDigest = dis.readBoolean();
    Row row = null;
    if (!isDigest) {
        row = Row.serializer().deserialize(dis);
    }
    ReadResponse rmsg = isDigest ? new ReadResponse(digest) : new ReadResponse(row);
    rmsg.setIsDigestQuery(isDigest);
    return rmsg;
}
From source file:com.google.gwt.dev.javac.CachedCompilationUnit.java
/**
 * Restores a CachedCompilationUnit previously written to the compilation-unit
 * disk cache. Reads, in order: the cache version (returns null on mismatch so
 * the caller recompiles), simple metadata fields, the compiled classes
 * (resolving each enclosing class by name among entries already read, and
 * failing fast if a named enclosing class is missing), the dependency content
 * ids, the JSNI methods (restored against the supplied JsProgram), and the
 * method-argument name lookup. The stream is wrapped in a DataInputStream over
 * a BufferedInputStream and closed quietly in all cases.
 *
 * @param inputStream serialized cache entry to read (closed before returning)
 * @param jsProgram program used to restore JSNI methods
 * @return the restored unit, or null if the cache version does not match
 * @throws Exception on any read or reflection failure
 */
public static CachedCompilationUnit load(InputStream inputStream, JsProgram jsProgram) throws Exception { DataInputStream dis = new DataInputStream(new BufferedInputStream(inputStream)); try {// ww w . jav a2s. co m CachedCompilationUnit compilationUnit = new CachedCompilationUnit(); // version long version = dis.readLong(); if (version != CompilationUnitDiskCache.CACHE_VERSION) { return null; } // some simple stuff :) compilationUnit.m_lastModified = dis.readLong(); compilationUnit.m_displayLocation = dis.readUTF(); compilationUnit.m_typeName = dis.readUTF(); compilationUnit.m_contentId = new ContentId(dis.readUTF()); compilationUnit.m_isSuperSource = dis.readBoolean(); // compiled classes { int size = dis.readInt(); compilationUnit.m_compiledClasses = new ArrayList<CompiledClass>(size); for (int i = 0; i < size; ++i) { // internal name String internalName = dis.readUTF(); // is local boolean isLocal = dis.readBoolean(); // bytes int byteSize = dis.readInt(); byte[] bytes = new byte[byteSize]; dis.readFully(bytes); // enclosing class CompiledClass enclosingClass = null; String enclosingClassName = dis.readUTF(); if (!StringUtils.isEmpty(enclosingClassName)) { for (CompiledClass cc : compilationUnit.m_compiledClasses) { if (enclosingClassName.equals(cc.getInternalName())) { enclosingClass = cc; break; } } } // some assertion if (!StringUtils.isEmpty(enclosingClassName) && enclosingClass == null) { throw new IllegalStateException("Can't find the enclosing class \"" + enclosingClassName + "\" for \"" + internalName + "\""); } // init unit CompiledClass cc = new CompiledClass(internalName, bytes, isLocal, enclosingClass); cc.initUnit(compilationUnit); compilationUnit.m_compiledClasses.add(cc); } } // dependencies { compilationUnit.m_dependencies = new HashSet<ContentId>(); int size = dis.readInt(); if (size > 0) { for (int i = 0; i < size; i++) { compilationUnit.m_dependencies.add(new ContentId(dis.readUTF())); } } } // JSNI methods { compilationUnit.m_jsniMethods = 
new ArrayList<JsniMethod>(); int size = dis.readInt(); if (size > 0) { for (int i = 0; i < size; i++) { String name = dis.readUTF(); int startPos = dis.readInt(); int endPos = dis.readInt(); int startLine = dis.readInt(); String source = dis.readUTF(); String fileName = compilationUnit.m_displayLocation; SourceInfo jsInfo = SourceOrigin.create(startPos, endPos, startLine, fileName); compilationUnit.m_jsniMethods .add(JsniCollector.restoreJsniMethod(name, source, jsInfo, jsProgram)); } } } // Method lookup { compilationUnit.m_methodArgs = MethodArgNamesLookup.load(dis); } return compilationUnit; } finally { IOUtils.closeQuietly(dis); } }
From source file:flens.input.SocketInput.java
@Override public void readAndProcess(Pair<String, DataInputStream> inx) throws IOException { DataInputStream in = inx.getRight(); String host = inx.getLeft();//from w w w . jav a 2s. co m int line = in.readInt(); byte[] block = new byte[line]; IOUtils.readFully(in, block); Map<String, Object> values = new HashMap<String, Object>(); values.put(Constants.BODY, block); Record r = Record.createWithTimeHostAndValues(System.currentTimeMillis(), host, values); dispatch(r); }