List of usage examples for java.io DataInputStream readBoolean
public final boolean readBoolean() throws IOException
readBoolean is declared by the DataInput interface and implemented by DataInputStream. It reads one byte from the stream and returns false if that byte is zero and true otherwise; an EOFException is thrown if the stream ends before the byte can be read. The examples below, collected from open-source projects, show the method in context.
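Before turning to the project code, here is a minimal, self-contained sketch of the round trip: a flag written with DataOutputStream.writeBoolean is read back with readBoolean. The class name and temporary file are illustrative only.

import java.io.*;

public class ReadBooleanDemo {
    public static void main(String[] args) throws IOException {
        // hypothetical scratch file for the demo
        File tmp = File.createTempFile("flag", ".bin");
        tmp.deleteOnExit();

        // writeBoolean stores the value as a single byte: 1 for true, 0 for false
        try (DataOutputStream out = new DataOutputStream(new FileOutputStream(tmp))) {
            out.writeBoolean(true);
        }

        // readBoolean reads that byte back; it throws EOFException once the stream is exhausted
        try (DataInputStream in = new DataInputStream(new FileInputStream(tmp))) {
            System.out.println(in.readBoolean()); // prints: true
        }
    }
}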
From source file: ch.unil.genescore.vegas.Snp.java
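readBoolean restores the strand flag (posStrand_) of a SNP record whose position fields were serialized one by one with the matching DataOutputStream write calls.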
public void readPosAndMinorAllele(DataInputStream is) throws IOException, DataInconsistencyException {
    // TODO: Only solves homegrown case atm
    // @David I changed this to an IllegalArgumentException because the other one was unknown on my system
    // id_ is already read
    //String curChr = chr_;
    //int curStart = start_;
    //int curEnd = end_;
    chr_ = is.readUTF();
    start_ = is.readInt();
    end_ = is.readInt();
    //if (curChr != null || curStart != -1 || curEnd != -1) {
    //    if (!chr_.equals(curChr) || start_ != curStart || end_ != curEnd) {
    //        throw new RuntimeException("snp seems to have been set before to another value");
    //    }
    //}
    posStrand_ = is.readBoolean();
    minorAllele_ = is.readChar();
}
From source file: ch.unil.genescore.vegas.Snp.java
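A variant of the method above: after readBoolean restores the strand flag, the minor allele read from the stream is checked against the alleles already loaded from GWAS summary data, and the z-score is flipped when the alleles turn out to be swapped.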
public void readPosAndAllele(DataInputStream is) throws IOException, DataInconsistencyException {
    // TODO: Only solves homegrown case atm
    // @David I changed this to an IllegalArgumentException because the other one was unknown on my system
    // id_ is already read
    //String curChr = chr_;
    //int curStart = start_;
    //int curEnd = end_;
    chr_ = is.readUTF();
    start_ = is.readInt();
    end_ = is.readInt();
    //if (curChr != null || curStart != -1 || curEnd != -1) {
    //    if (!chr_.equals(curChr) || start_ != curStart || end_ != curEnd) {
    //        throw new RuntimeException("snp seems to have been set before to another value");
    //    }
    //}
    posStrand_ = is.readBoolean();
    char minorAllele = is.readChar();
    boolean snpHasBeenSeenInGWAS = false;
    if (minorAllele_ != 'N' || majorAllele_ != 'N') {
        snpHasBeenSeenInGWAS = true;
    }
    if (Pascal.set.withZScore_ && minorAllele_ != minorAllele) {
        if (minorAllele != majorAllele_ && snpHasBeenSeenInGWAS) {
            throw new DataInconsistencyException(
                    "different minor allele of reference population not found GWAS data. Snp left out.");
        }
        zscore_ *= -1;
        char minorAlleleSummaryFile = minorAllele_;
        char majorAlleleSummaryFile = majorAllele_;
        minorAllele_ = majorAlleleSummaryFile;
        majorAllele_ = minorAlleleSummaryFile;
    }
}
From source file: org.apache.jxtadoop.hdfs.server.datanode.DataXceiver.java
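In this HDFS-over-JXTA data transceiver, readBoolean decodes two one-byte flags of the block-transfer header (whether the write is part of a recovery, and whether source-datanode info follows); the same flags are re-encoded with writeBoolean when the header is forwarded to the mirror node.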
/**
 * Write a block to disk.
 *
 * @param in The stream to read from
 * @throws IOException
 */
private void writeBlock(DataInputStream in) throws IOException {
    LOG.debug("Mathod called : writeBlock()");
    DatanodeInfo srcDataNode = null;
    LOG.debug("writeBlock receive buf size " + s.getReceiveBufferSize() + " tcp no delay " + s.getTcpNoDelay());
    //
    // Read in the header
    //
    Block block = new Block(in.readLong(), dataXceiverServer.estimateBlockSize, in.readLong());
    LOG.info("Receiving block " + block + " src: " + remoteAddress + " dest: " + localAddress);
    int pipelineSize = in.readInt(); // num of datanodes in entire pipeline
    boolean isRecovery = in.readBoolean(); // is this part of recovery?
    String client = Text.readString(in); // working on behalf of this client
    boolean hasSrcDataNode = in.readBoolean(); // is src node info present
    if (hasSrcDataNode) {
        srcDataNode = new DatanodeInfo();
        srcDataNode.readFields(in);
    }
    int numTargets = in.readInt();
    if (numTargets < 0) {
        throw new IOException("Mislabelled incoming datastream.");
    }
    DatanodeInfo[] targets = new DatanodeInfo[numTargets];
    for (int i = 0; i < targets.length; i++) {
        DatanodeInfo tmp = new DatanodeInfo();
        tmp.readFields(in);
        targets[i] = tmp;
    }
    DataOutputStream mirrorOut = null;   // stream to next target
    DataInputStream mirrorIn = null;     // reply from next target
    DataOutputStream replyOut = null;    // stream to prev target
    JxtaSocket mirrorSock = null;        // socket to next target
    BlockReceiver blockReceiver = null;  // responsible for data handling
    String mirrorNode = null;            // the name:port of next target
    String firstBadLink = "";            // first datanode that failed in connection setup
    try {
        // open a block receiver and check if the block does not exist
        blockReceiver = new BlockReceiver(block, in,
                ((JxtaSocketAddress) s.getRemoteSocketAddress()).getPeerId().toString(),
                ((JxtaSocketAddress) s.getLocalSocketAddress()).getPeerId().toString(),
                isRecovery, client, srcDataNode, datanode);
        // get a connection back to the previous target
        ReliableOutputStream replyOutRos = (ReliableOutputStream) s.getOutputStream();
        replyOut = new DataOutputStream(replyOutRos);
        //
        // Open network conn to backup machine, if appropriate
        //
        if (targets.length > 0) {
            // Connect to backup machine
            mirrorNode = targets[0].getPeerId();
            try {
                mirrorSock = datanode.getDnPeer().getInfoSocket(mirrorNode.toString());
                if (mirrorSock == null)
                    throw new IOException("Failed to get a mirror socket");
                mirrorOut = new DataOutputStream((ReliableOutputStream) mirrorSock.getOutputStream());
                mirrorIn = new DataInputStream((ReliableInputStream) mirrorSock.getInputStream());
                // Write header: Copied from DFSClient.java!
                mirrorOut.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);
                mirrorOut.write(DataTransferProtocol.OP_WRITE_BLOCK);
                mirrorOut.writeLong(block.getBlockId());
                mirrorOut.writeLong(block.getGenerationStamp());
                mirrorOut.writeInt(pipelineSize);
                mirrorOut.writeBoolean(isRecovery);
                Text.writeString(mirrorOut, client);
                mirrorOut.writeBoolean(hasSrcDataNode);
                if (hasSrcDataNode) { // pass src node information
                    srcDataNode.write(mirrorOut);
                }
                mirrorOut.writeInt(targets.length - 1);
                for (int i = 1; i < targets.length; i++) {
                    targets[i].write(mirrorOut);
                }
                blockReceiver.writeChecksumHeader(mirrorOut);
                mirrorOut.flush();
                // read connect ack (only for clients, not for replication req)
                if (client.length() != 0) {
                    firstBadLink = Text.readString(mirrorIn);
                    if (LOG.isDebugEnabled() || firstBadLink.length() > 0) {
                        LOG.info("Datanode " + targets.length + " got response for connect ack "
                                + " from downstream datanode with firstbadlink as " + firstBadLink);
                    }
                }
            } catch (SocketTimeoutException ste) {
                LOG.debug("Time out while receiving data on DataXceiver");
                LOG.debug(ste);
                ste.printStackTrace();
            } catch (IOException e) {
                LOG.debug("IOException occurred : " + e.getMessage());
                if (client.length() != 0) {
                    Text.writeString(replyOut, mirrorNode);
                    replyOut.flush();
                }
                IOUtils.closeStream(mirrorOut);
                mirrorOut = null;
                IOUtils.closeStream(mirrorIn);
                mirrorIn = null;
                if (mirrorSock != null) {
                    IOUtils.closeSocket(mirrorSock);
                    mirrorSock = null;
                }
                if (client.length() > 0) {
                    throw e;
                } else {
                    LOG.info(datanode.dnRegistration + ":Exception transfering block " + block
                            + " to mirror " + mirrorNode + ". continuing without the mirror.\n"
                            + StringUtils.stringifyException(e));
                }
            }
        }
        // send connect ack back to source (only for clients)
        if (client.length() != 0) {
            if (LOG.isDebugEnabled() || firstBadLink.length() > 0) {
                LOG.info("Datanode " + targets.length
                        + " forwarding connect ack to upstream firstbadlink is " + firstBadLink);
            }
            Text.writeString(replyOut, firstBadLink);
            replyOut.flush();
        }
        // receive the block and mirror to the next target
        String mirrorAddr = (mirrorSock == null) ? null : mirrorNode;
        blockReceiver.receiveBlock(mirrorOut, mirrorIn, replyOut, mirrorAddr, null, targets.length);
        // if this write is for a replication request (and not from a client),
        // then confirm block. For client-writes, the block is finalized
        // in the PacketResponder.
        if (client.length() == 0) {
            datanode.notifyNamenodeReceivedBlock(block, DataNode.EMPTY_DEL_HINT);
            LOG.info("Received block " + block + " src: " + remoteAddress + " dest: " + localAddress
                    + " of size " + block.getNumBytes());
        }
        if (datanode.blockScanner != null) {
            datanode.blockScanner.addBlock(block);
        }
    } catch (IOException ioe) {
        LOG.info("writeBlock " + block + " received exception " + ioe);
        throw ioe;
    } catch (Exception e) {
        LOG.warn("Exception occurred in writting block : " + e.getMessage());
    } finally {
        // close all opened streams
        LOG.debug("Finalizing : writeBlock()");
        IOUtils.closeStream(mirrorOut);
        IOUtils.closeStream(mirrorIn);
        IOUtils.closeStream(replyOut);
        IOUtils.closeSocket(mirrorSock);
        IOUtils.closeStream(blockReceiver);
    }
}
From source file: org.apache.hadoop.hive.ql.exec.tez.DynamicPartitionPruner.java
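Hive's dynamic partition pruner reads a skip flag with readBoolean: if set, pruning is disabled for the source; otherwise the remaining payload is deserialized into partition values.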
@SuppressWarnings("deprecation")
@VisibleForTesting
protected String processPayload(ByteBuffer payload, String sourceName) throws SerDeException, IOException {
    DataInputStream in = new DataInputStream(new ByteBufferBackedInputStream(payload));
    try {
        String columnName = in.readUTF();
        LOG.info("Source of event: " + sourceName);
        List<SourceInfo> infos = this.sourceInfoMap.get(sourceName);
        if (infos == null) {
            throw new IllegalStateException("no source info for event source: " + sourceName);
        }
        SourceInfo info = null;
        for (SourceInfo si : infos) {
            if (columnName.equals(si.columnName)) {
                info = si;
                break;
            }
        }
        if (info == null) {
            throw new IllegalStateException("no source info for column: " + columnName);
        }
        if (info.skipPruning.get()) {
            // Marked as skipped previously. Don't bother processing the rest of the payload.
        } else {
            boolean skip = in.readBoolean();
            if (skip) {
                info.skipPruning.set(true);
            } else {
                while (payload.hasRemaining()) {
                    writable.readFields(in);
                    Object row = info.deserializer.deserialize(writable);
                    Object value = info.soi.getStructFieldData(row, info.field);
                    value = ObjectInspectorUtils.copyToStandardObject(value, info.fieldInspector);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Adding: " + value + " to list of required partitions");
                    }
                    info.values.add(value);
                }
            }
        }
    } finally {
        if (in != null) {
            in.close();
        }
    }
    return sourceName;
}
From source file: MersenneTwisterFast.java
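The RNG restores its complete serialized state; the trailing readBoolean recovers the cached-Gaussian flag that pairs with the preceding readDouble.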
/**
 * Reads the entire state of the MersenneTwister RNG from the stream.
 *
 * @param stream input stream
 * @throws IOException exception from stream reading
 */
public void readState(DataInputStream stream) throws IOException {
    int len = mt.length;
    for (int x = 0; x < len; x++)
        mt[x] = stream.readInt();

    len = mag01.length;
    for (int x = 0; x < len; x++)
        mag01[x] = stream.readInt();

    mti = stream.readInt();
    __nextNextGaussian = stream.readDouble();
    __haveNextNextGaussian = stream.readBoolean();
}
From source file: org.apache.geode.internal.cache.tier.sockets.HandShake.java
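During Geode's client/server handshake, readBoolean tells the server whether the client also requires the server to authenticate itself (sendAuthentication) before the Diffie-Hellman-encrypted credentials are exchanged.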
public static Properties readCredentials(DataInputStream dis, DataOutputStream dos, DistributedSystem system,
        SecurityService securityService) throws GemFireSecurityException, IOException {

    boolean requireAuthentication = securityService.isClientSecurityRequired();
    Properties credentials = null;
    try {
        byte secureMode = dis.readByte();
        throwIfMissingRequiredCredentials(requireAuthentication, secureMode != CREDENTIALS_NONE);
        if (secureMode == CREDENTIALS_NORMAL) {
            if (requireAuthentication) {
                credentials = DataSerializer.readProperties(dis);
            } else {
                DataSerializer.readProperties(dis); // ignore the credentials
            }
        } else if (secureMode == CREDENTIALS_DHENCRYPT) {
            boolean sendAuthentication = dis.readBoolean();
            InternalLogWriter securityLogWriter = (InternalLogWriter) system.getSecurityLogWriter();
            // Get the symmetric encryption algorithm to be used
            String skAlgo = DataSerializer.readString(dis);
            // Get the public key of the other side
            byte[] keyBytes = DataSerializer.readByteArray(dis);
            byte[] challenge = null;
            PublicKey pubKey = null;
            if (requireAuthentication) {
                // Generate PublicKey from encoded form
                X509EncodedKeySpec x509KeySpec = new X509EncodedKeySpec(keyBytes);
                KeyFactory keyFact = KeyFactory.getInstance("DH");
                pubKey = keyFact.generatePublic(x509KeySpec);
                // Send the public key to other side
                keyBytes = dhPublicKey.getEncoded();
                challenge = new byte[64];
                random.nextBytes(challenge);
                // If the server has to also authenticate itself then
                // sign the challenge from client.
                if (sendAuthentication) {
                    // Get the challenge string from client
                    byte[] clientChallenge = DataSerializer.readByteArray(dis);
                    if (privateKeyEncrypt == null) {
                        throw new AuthenticationFailedException(
                                LocalizedStrings.HandShake_SERVER_PRIVATE_KEY_NOT_AVAILABLE_FOR_CREATING_SIGNATURE
                                        .toLocalizedString());
                    }
                    // Sign the challenge from client and send it to the client
                    Signature sig = Signature.getInstance(privateKeySignAlgo);
                    sig.initSign(privateKeyEncrypt);
                    sig.update(clientChallenge);
                    byte[] signedBytes = sig.sign();
                    dos.writeByte(REPLY_OK);
                    DataSerializer.writeByteArray(keyBytes, dos);
                    DataSerializer.writeString(privateKeySubject, dos);
                    DataSerializer.writeByteArray(signedBytes, dos);
                    securityLogWriter.fine("HandShake: sent the signed client challenge");
                } else {
                    // These two lines should not be moved before the if{} statement in
                    // a common block for both if...then...else parts. This is to handle
                    // the case when an AuthenticationFailedException is thrown by the
                    // if...then part when sending the signature.
                    dos.writeByte(REPLY_OK);
                    DataSerializer.writeByteArray(keyBytes, dos);
                }
                // Now send the server challenge
                DataSerializer.writeByteArray(challenge, dos);
                securityLogWriter.fine("HandShake: sent the public key and challenge");
                dos.flush();
                // Read and decrypt the credentials
                byte[] encBytes = DataSerializer.readByteArray(dis);
                KeyAgreement ka = KeyAgreement.getInstance("DH");
                ka.init(dhPrivateKey);
                ka.doPhase(pubKey, true);
                Cipher decrypt;
                int keysize = getKeySize(skAlgo);
                int blocksize = getBlockSize(skAlgo);
                if (keysize == -1 || blocksize == -1) {
                    SecretKey sKey = ka.generateSecret(skAlgo);
                    decrypt = Cipher.getInstance(skAlgo);
                    decrypt.init(Cipher.DECRYPT_MODE, sKey);
                } else {
                    String algoStr = getDhAlgoStr(skAlgo);
                    byte[] sKeyBytes = ka.generateSecret();
                    SecretKeySpec sks = new SecretKeySpec(sKeyBytes, 0, keysize, algoStr);
                    IvParameterSpec ivps = new IvParameterSpec(sKeyBytes, keysize, blocksize);
                    decrypt = Cipher.getInstance(algoStr + "/CBC/PKCS5Padding");
                    decrypt.init(Cipher.DECRYPT_MODE, sks, ivps);
                }
                byte[] credentialBytes = decrypt.doFinal(encBytes);
                ByteArrayInputStream bis = new ByteArrayInputStream(credentialBytes);
                DataInputStream dinp = new DataInputStream(bis);
                credentials = DataSerializer.readProperties(dinp);
                byte[] challengeRes = DataSerializer.readByteArray(dinp);
                // Check the challenge string
                if (!Arrays.equals(challenge, challengeRes)) {
                    throw new AuthenticationFailedException(
                            LocalizedStrings.HandShake_MISMATCH_IN_CHALLENGE_BYTES_MALICIOUS_CLIENT
                                    .toLocalizedString());
                }
                dinp.close();
            } else {
                if (sendAuthentication) {
                    // Read and ignore the client challenge
                    DataSerializer.readByteArray(dis);
                }
                dos.writeByte(REPLY_AUTH_NOT_REQUIRED);
                dos.flush();
            }
        } else if (secureMode == SECURITY_MULTIUSER_NOTIFICATIONCHANNEL) {
            // no credentials here; the CCP will get the credential (Principal) via ServerConnection
            logger.debug("readCredential where multiuser mode creating callback connection");
        }
    } catch (IOException ex) {
        throw ex;
    } catch (GemFireSecurityException ex) {
        throw ex;
    } catch (Exception ex) {
        throw new AuthenticationFailedException(
                LocalizedStrings.HandShake_FAILURE_IN_READING_CREDENTIALS.toLocalizedString(), ex);
    }
    return credentials;
}
From source file: org.apache.hadoop.hdfs.server.namenode.AvatarNode.java
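Here readBoolean serves as a presence flag: the failover test snapshot optionally stores an fsck report, and the following Text.readString is only executed when the flag is true.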
private void verifyFailoverTestData() throws IOException {
    if (!enableTestFramework) {
        LOG.info("Failover: Test framework - disabled");
        return;
    }
    String fsck = "";
    LOG.info("Failover: Test framework - verification - starting...");
    AvatarFailoverSnapshot snapshot = new AvatarFailoverSnapshot();
    File snapshotFile = getSnapshotFile(confg, false);
    if (snapshotFile == null)
        return;
    DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(snapshotFile)));
    try {
        snapshot.readFields(in);
        if (in.readBoolean()) {
            LOG.info("Failover: Test framework - found fsck data");
            fsck = Text.readString(in);
        }
    } finally {
        in.close();
    }
    LOG.info("Failover: Test framework - verifying open files: found "
            + snapshot.getOpenFilesInfo().getOpenFiles().size() + " files in the test snapshot");
    verifyOpenFiles(snapshot.getOpenFilesInfo());
    LOG.info("Failover: Test framework - verifying closed files: found "
            + snapshot.getSampledFiles().size() + " files in the test snapshot");
    for (FileStatusExtended stat : snapshot.getSampledFiles()) {
        verifySnapshotSampledFile(stat);
    }
    LOG.info("Failover: Test framework - verification - succeeded");
    this.oldPrimaryFsck = fsck;
}
From source file: org.apache.jackrabbit.core.persistence.bundle.util.BundleBinding.java
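Jackrabbit's bundle deserializer uses readBoolean to restore the node's referenceable flag once all property entries have been read.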
/**
 * Deserializes a <code>NodePropBundle</code> from a data input stream.
 *
 * @param in the input stream
 * @param id the node id for the new bundle
 * @return the bundle
 * @throws IOException if an I/O error occurs.
 */
public NodePropBundle readBundle(DataInputStream in, NodeId id) throws IOException {
    NodePropBundle bundle = new NodePropBundle(this, id);

    // read version and primary type...special handling
    int index = in.readInt();

    // get version
    int version = (index >> 24) & 0xff;
    index &= 0x00ffffff;
    String uri = nsIndex.indexToString(index);
    String local = nameIndex.indexToString(in.readInt());
    Name nodeTypeName = NameFactoryImpl.getInstance().create(uri, local);

    // primaryType
    bundle.setNodeTypeName(nodeTypeName);

    // parentUUID
    bundle.setParentId(readID(in));

    // definitionId
    in.readUTF();

    // mixin types
    Set mixinTypeNames = new HashSet();
    Name name = readIndexedQName(in);
    while (name != null) {
        mixinTypeNames.add(name);
        name = readIndexedQName(in);
    }
    bundle.setMixinTypeNames(mixinTypeNames);

    // properties
    name = readIndexedQName(in);
    while (name != null) {
        PropertyId pId = new PropertyId(bundle.getId(), name);
        // skip redundant primaryType, mixinTypes and uuid properties
        if (name.equals(NameConstants.JCR_PRIMARYTYPE) || name.equals(NameConstants.JCR_MIXINTYPES)
                || name.equals(NameConstants.JCR_UUID)) {
            readPropertyEntry(in, pId);
            name = readIndexedQName(in);
            continue;
        }
        NodePropBundle.PropertyEntry pState = readPropertyEntry(in, pId);
        bundle.addProperty(pState);
        name = readIndexedQName(in);
    }

    // set referenceable flag
    bundle.setReferenceable(in.readBoolean());

    // child nodes (list of uuid/name pairs)
    NodeId childId = readID(in);
    while (childId != null) {
        bundle.addChildNodeEntry(readQName(in), childId);
        childId = readID(in);
    }

    // read modcount, since version 1.0
    if (version >= VERSION_1) {
        bundle.setModCount(readModCount(in));
    }

    // read shared set, since version 2.0
    Set sharedSet = new HashSet();
    if (version >= VERSION_2) {
        // shared set (list of parent uuids)
        NodeId parentId = readID(in);
        while (parentId != null) {
            sharedSet.add(parentId);
            parentId = readID(in);
        }
    }
    bundle.setSharedSet(sharedSet);

    return bundle;
}
From source file: org.apache.jackrabbit.core.persistence.bundle.util.BundleBinding.java
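Apparently a later revision of the same method, now using generics and restoring the node definition id; the readBoolean call for the referenceable flag is unchanged.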
/**
 * Deserializes a <code>NodePropBundle</code> from a data input stream.
 *
 * @param in the input stream
 * @param id the node id for the new bundle
 * @return the bundle
 * @throws IOException if an I/O error occurs.
 */
public NodePropBundle readBundle(DataInputStream in, NodeId id) throws IOException {
    NodePropBundle bundle = new NodePropBundle(this, id);

    // read version and primary type...special handling
    int index = in.readInt();

    // get version
    int version = (index >> 24) & 0xff;
    index &= 0x00ffffff;
    String uri = nsIndex.indexToString(index);
    String local = nameIndex.indexToString(in.readInt());
    Name nodeTypeName = NameFactoryImpl.getInstance().create(uri, local);

    // primaryType
    bundle.setNodeTypeName(nodeTypeName);

    // parentUUID
    bundle.setParentId(readID(in));

    // definitionId
    bundle.setNodeDefId(NodeDefId.valueOf(in.readUTF()));

    // mixin types
    Set<Name> mixinTypeNames = new HashSet<Name>();
    Name name = readIndexedQName(in);
    while (name != null) {
        mixinTypeNames.add(name);
        name = readIndexedQName(in);
    }
    bundle.setMixinTypeNames(mixinTypeNames);

    // properties
    name = readIndexedQName(in);
    while (name != null) {
        PropertyId pId = new PropertyId(bundle.getId(), name);
        // skip redundant primaryType, mixinTypes and uuid properties
        if (name.equals(NameConstants.JCR_PRIMARYTYPE) || name.equals(NameConstants.JCR_MIXINTYPES)
                || name.equals(NameConstants.JCR_UUID)) {
            readPropertyEntry(in, pId);
            name = readIndexedQName(in);
            continue;
        }
        NodePropBundle.PropertyEntry pState = readPropertyEntry(in, pId);
        bundle.addProperty(pState);
        name = readIndexedQName(in);
    }

    // set referenceable flag
    bundle.setReferenceable(in.readBoolean());

    // child nodes (list of uuid/name pairs)
    NodeId childId = readID(in);
    while (childId != null) {
        bundle.addChildNodeEntry(readQName(in), childId);
        childId = readID(in);
    }

    // read modcount, since version 1.0
    if (version >= VERSION_1) {
        bundle.setModCount(readModCount(in));
    }

    // read shared set, since version 2.0
    Set<NodeId> sharedSet = new HashSet<NodeId>();
    if (version >= VERSION_2) {
        // shared set (list of parent uuids)
        NodeId parentId = readID(in);
        while (parentId != null) {
            sharedSet.add(parentId);
            parentId = readID(in);
        }
    }
    bundle.setSharedSet(sharedSet);

    return bundle;
}
From source file: org.apache.geode.internal.cache.tier.sockets.HandShake.java
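An instance-method counterpart of readCredentials above: the sendAuthentication flag is again read with readBoolean, but according to the in-code comments the credentials themselves are no longer transmitted during this part of the handshake.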
public Properties readCredential(DataInputStream dis, DataOutputStream dos, DistributedSystem system)
        throws GemFireSecurityException, IOException {

    Properties credentials = null;
    boolean requireAuthentication = securityService.isClientSecurityRequired();
    try {
        byte secureMode = dis.readByte();
        throwIfMissingRequiredCredentials(requireAuthentication, secureMode != CREDENTIALS_NONE);
        if (secureMode == CREDENTIALS_NORMAL) {
            this.appSecureMode = CREDENTIALS_NORMAL;
            // the credentials themselves are no longer read in this mode
        } else if (secureMode == CREDENTIALS_DHENCRYPT) {
            this.appSecureMode = CREDENTIALS_DHENCRYPT;
            boolean sendAuthentication = dis.readBoolean();
            InternalLogWriter securityLogWriter = (InternalLogWriter) system.getSecurityLogWriter();
            // Get the symmetric encryption algorithm to be used
            this.clientSKAlgo = DataSerializer.readString(dis);
            // Get the public key of the other side
            byte[] keyBytes = DataSerializer.readByteArray(dis);
            byte[] challenge = null;
            if (requireAuthentication) {
                // Generate PublicKey from encoded form
                X509EncodedKeySpec x509KeySpec = new X509EncodedKeySpec(keyBytes);
                KeyFactory keyFact = KeyFactory.getInstance("DH");
                this.clientPublicKey = keyFact.generatePublic(x509KeySpec);
                // Send the public key to other side
                keyBytes = dhPublicKey.getEncoded();
                challenge = new byte[64];
                random.nextBytes(challenge);
                // If the server has to also authenticate itself then
                // sign the challenge from client.
                if (sendAuthentication) {
                    // Get the challenge string from client
                    byte[] clientChallenge = DataSerializer.readByteArray(dis);
                    if (privateKeyEncrypt == null) {
                        throw new AuthenticationFailedException(
                                LocalizedStrings.HandShake_SERVER_PRIVATE_KEY_NOT_AVAILABLE_FOR_CREATING_SIGNATURE
                                        .toLocalizedString());
                    }
                    // Sign the challenge from client and send it to the client
                    Signature sig = Signature.getInstance(privateKeySignAlgo);
                    sig.initSign(privateKeyEncrypt);
                    sig.update(clientChallenge);
                    byte[] signedBytes = sig.sign();
                    dos.writeByte(REPLY_OK);
                    DataSerializer.writeByteArray(keyBytes, dos);
                    DataSerializer.writeString(privateKeySubject, dos);
                    DataSerializer.writeByteArray(signedBytes, dos);
                    securityLogWriter.fine("HandShake: sent the signed client challenge");
                } else {
                    // These two lines should not be moved before the if{} statement in
                    // a common block for both if...then...else parts. This is to handle
                    // the case when an AuthenticationFailedException is thrown by the
                    // if...then part when sending the signature.
                    dos.writeByte(REPLY_OK);
                    DataSerializer.writeByteArray(keyBytes, dos);
                }
                // Now send the server challenge
                DataSerializer.writeByteArray(challenge, dos);
                securityLogWriter.fine("HandShake: sent the public key and challenge");
                dos.flush();
                // Read and decrypt the credentials
                byte[] encBytes = DataSerializer.readByteArray(dis);
                Cipher c = getDecryptCipher(this.clientSKAlgo, this.clientPublicKey);
                byte[] credentialBytes = decryptBytes(encBytes, c);
                ByteArrayInputStream bis = new ByteArrayInputStream(credentialBytes);
                DataInputStream dinp = new DataInputStream(bis);
                // credentials = DataSerializer.readProperties(dinp); // we don't send them in the handshake now
                byte[] challengeRes = DataSerializer.readByteArray(dinp);
                // Check the challenge string
                if (!Arrays.equals(challenge, challengeRes)) {
                    throw new AuthenticationFailedException(
                            LocalizedStrings.HandShake_MISMATCH_IN_CHALLENGE_BYTES_MALICIOUS_CLIENT
                                    .toLocalizedString());
                }
                dinp.close();
            } else {
                if (sendAuthentication) {
                    // Read and ignore the client challenge
                    DataSerializer.readByteArray(dis);
                }
                dos.writeByte(REPLY_AUTH_NOT_REQUIRED);
                dos.flush();
            }
        }
    } catch (IOException ex) {
        throw ex;
    } catch (GemFireSecurityException ex) {
        throw ex;
    } catch (Exception ex) {
        throw new AuthenticationFailedException(
                LocalizedStrings.HandShake_FAILURE_IN_READING_CREDENTIALS.toLocalizedString(), ex);
    }
    return credentials;
}