List of usage examples for java.io DataInputStream readUTF
public final String readUTF() throws IOException
The readUTF method is declared by the DataInput interface; DataInputStream supplies the final implementation shown above. It reads a string that was encoded in modified UTF-8 by DataOutput.writeUTF: a two-byte unsigned length followed by the encoded characters.
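Before the project examples below, here is a minimal self-contained sketch of the usual round trip. readUTF only understands the length-prefixed records produced by DataOutput.writeUTF, so reader and writer must agree on the order of records. The file name and class name here are illustrative only.

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

public class ReadUTFDemo {
    public static void main(String[] args) throws IOException {
        // Write two UTF records; each is prefixed with a two-byte length.
        try (DataOutputStream out = new DataOutputStream(new FileOutputStream("demo.bin"))) {
            out.writeUTF("hello");
            out.writeUTF("wörld");
        }
        // Read them back in the same order they were written.
        try (DataInputStream in = new DataInputStream(new FileInputStream("demo.bin"))) {
            System.out.println(in.readUTF()); // hello
            System.out.println(in.readUTF()); // wörld
        }
    }
}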
From source file:runtime.starter.MPJYarnClient.java
public void run() throws Exception {
    Map<String, String> map = System.getenv();
    try {
        mpjHomeDir = map.get("MPJ_HOME");
        if (mpjHomeDir == null) {
            throw new Exception("[MPJRun.java]:MPJ_HOME environment variable not found..");
        }
    } catch (Exception exc) {
        System.out.println("[MPJRun.java]:" + exc.getMessage());
        exc.printStackTrace();
        return;
    }

    // Copy the application master jar to HDFS
    // Create a local resource to point to the destination jar path
    FileSystem fs = FileSystem.get(conf);
    /*
    Path dataset = new Path(fs.getHomeDirectory(), "/dataset");
    FileStatus datasetFile = fs.getFileStatus(dataset);
    BlockLocation myBlocks[] = fs.getFileBlockLocations(datasetFile, 0, datasetFile.getLen());
    for (BlockLocation b : myBlocks) {
        System.out.println("\n--------------------");
        System.out.println("Length " + b.getLength());
        for (String host : b.getHosts()) {
            System.out.println("host " + host);
        }
    }
    */
    Path source = new Path(mpjHomeDir + "/lib/mpj-app-master.jar");
    String pathSuffix = hdfsFolder + "mpj-app-master.jar";
    Path dest = new Path(fs.getHomeDirectory(), pathSuffix);
    if (debugYarn) {
        logger.info("Uploading mpj-app-master.jar to: " + dest.toString());
    }
    fs.copyFromLocalFile(false, true, source, dest);
    FileStatus destStatus = fs.getFileStatus(dest);

    Path wrapperSource = new Path(mpjHomeDir + "/lib/mpj-yarn-wrapper.jar");
    String wrapperSuffix = hdfsFolder + "mpj-yarn-wrapper.jar";
    Path wrapperDest = new Path(fs.getHomeDirectory(), wrapperSuffix);
    if (debugYarn) {
        logger.info("Uploading mpj-yarn-wrapper.jar to: " + wrapperDest.toString());
    }
    fs.copyFromLocalFile(false, true, wrapperSource, wrapperDest);

    Path userJar = new Path(jarPath);
    String userJarSuffix = hdfsFolder + "user-code.jar";
    Path userJarDest = new Path(fs.getHomeDirectory(), userJarSuffix);
    if (debugYarn) {
        logger.info("Uploading user-code.jar to: " + userJarDest.toString());
    }
    fs.copyFromLocalFile(false, true, userJar, userJarDest);

    YarnConfiguration conf = new YarnConfiguration();
    YarnClient yarnClient = YarnClient.createYarnClient();
    yarnClient.init(conf);
    yarnClient.start();

    if (debugYarn) {
        YarnClusterMetrics metrics = yarnClient.getYarnClusterMetrics();
        logger.info("\nNodes Information");
        logger.info("Number of NM: " + metrics.getNumNodeManagers() + "\n");

        List<NodeReport> nodeReports = yarnClient.getNodeReports(NodeState.RUNNING);
        for (NodeReport n : nodeReports) {
            logger.info("NodeId: " + n.getNodeId());
            logger.info("RackName: " + n.getRackName());
            logger.info("Total Memory: " + n.getCapability().getMemory());
            logger.info("Used Memory: " + n.getUsed().getMemory());
            logger.info("Total vCores: " + n.getCapability().getVirtualCores());
            logger.info("Used vCores: " + n.getUsed().getVirtualCores() + "\n");
        }
    }

    logger.info("Creating server socket at HOST " + serverName + " PORT " + serverPort
            + " \nWaiting for " + np + " processes to connect...");

    // Creating a server socket for incoming connections
    try {
        servSock = new ServerSocket(serverPort);
        infoSock = new ServerSocket();
        TEMP_PORT = findPort(infoSock);
    } catch (Exception e) {
        e.printStackTrace();
    }

    // Create application via yarnClient
    YarnClientApplication app = yarnClient.createApplication();
    GetNewApplicationResponse appResponse = app.getNewApplicationResponse();

    int maxMem = appResponse.getMaximumResourceCapability().getMemory();
    if (debugYarn) {
        logger.info("Max memory capability resources in cluster: " + maxMem);
    }
    if (amMem > maxMem) {
        amMem = maxMem;
        logger.info("AM memory specified above threshold of cluster "
                + "Using maximum memory for AM container: " + amMem);
    }

    int maxVcores = appResponse.getMaximumResourceCapability().getVirtualCores();
    if (debugYarn) {
        logger.info("Max vCores capability resources in cluster: " + maxVcores);
    }
    if (amCores > maxVcores) {
        amCores = maxVcores;
        logger.info("AM virtual cores specified above threshold of cluster "
                + "Using maximum virtual cores for AM container: " + amCores);
    }

    // Set up the container launch context for the application master
    ContainerLaunchContext amContainer = Records.newRecord(ContainerLaunchContext.class);

    List<String> commands = new ArrayList<String>();
    commands.add("$JAVA_HOME/bin/java");
    commands.add("-Xmx" + amMem + "m");
    commands.add("runtime.starter.MPJAppMaster");
    commands.add("--np");
    commands.add(String.valueOf(np));
    commands.add("--serverName");
    commands.add(serverName);                    // server name
    commands.add("--ioServerPort");
    commands.add(Integer.toString(serverPort));  // server port
    commands.add("--deviceName");
    commands.add(deviceName);                    // device name
    commands.add("--className");
    commands.add(className);                     // class name
    commands.add("--wdir");
    commands.add(workingDirectory);              // wdir
    commands.add("--psl");
    commands.add(Integer.toString(psl));         // protocol switch limit
    commands.add("--wireUpPort");
    commands.add(String.valueOf(TEMP_PORT));     // for sharing ports & rank
    commands.add("--wrapperPath");
    commands.add(wrapperDest.toString());        // MPJYarnWrapper.jar HDFS path
    commands.add("--userJarPath");
    commands.add(userJarDest.toString());        // User Jar File HDFS path
    commands.add("--mpjContainerPriority");
    commands.add(mpjContainerPriority);          // priority for mpj containers
    commands.add("--containerMem");
    commands.add(containerMem);
    commands.add("--containerCores");
    commands.add(containerCores);

    if (debugYarn) {
        commands.add("--debugYarn");
    }

    if (appArgs != null) {
        commands.add("--appArgs");
        for (int i = 0; i < appArgs.length; i++) {
            commands.add(appArgs[i]);
        }
    }
    amContainer.setCommands(commands); // set commands

    // Setup local Resource for ApplicationMaster
    LocalResource appMasterJar = Records.newRecord(LocalResource.class);
    appMasterJar.setResource(ConverterUtils.getYarnUrlFromPath(dest));
    appMasterJar.setSize(destStatus.getLen());
    appMasterJar.setTimestamp(destStatus.getModificationTime());
    appMasterJar.setType(LocalResourceType.ARCHIVE);
    appMasterJar.setVisibility(LocalResourceVisibility.APPLICATION);

    amContainer.setLocalResources(Collections.singletonMap("mpj-app-master.jar", appMasterJar));

    // Setup CLASSPATH for ApplicationMaster
    // Setting up the environment
    Map<String, String> appMasterEnv = new HashMap<String, String>();
    setupAppMasterEnv(appMasterEnv);
    amContainer.setEnvironment(appMasterEnv);

    // Set up resource type requirements for ApplicationMaster
    Resource capability = Records.newRecord(Resource.class);
    capability.setMemory(amMem);
    capability.setVirtualCores(amCores);

    // Finally, set-up ApplicationSubmissionContext for the application
    ApplicationSubmissionContext appContext = app.getApplicationSubmissionContext();
    appContext.setApplicationName(appName);
    appContext.setAMContainerSpec(amContainer);
    appContext.setResource(capability);
    appContext.setQueue(yarnQueue); // queue
    Priority priority = Priority.newInstance(amPriority);
    appContext.setPriority(priority);

    ApplicationId appId = appContext.getApplicationId();

    // Adding ShutDown Hook
    Runtime.getRuntime().addShutdownHook(new KillYarnApp(appId, yarnClient));

    // Submit application
    System.out.println("Submitting Application: " + appContext.getApplicationName() + "\n");

    try {
        isRunning = true;
        yarnClient.submitApplication(appContext);
    } catch (Exception exp) {
        System.err.println("Error Submitting Application");
        exp.printStackTrace();
    }

    // np = number of processes, + 1 for Application Master container
    IOMessagesThread[] ioThreads = new IOMessagesThread[np + 1];
    peers = new String[np];
    socketList = new Vector<Socket>();
    int wport = 0;
    int rport = 0;
    int rank = 0;

    // np + 1 IOThreads
    for (int i = 0; i < (np + 1); i++) {
        try {
            sock = servSock.accept();
            // start IO thread to read STDOUT and STDERR from wrappers
            IOMessagesThread io = new IOMessagesThread(sock);
            ioThreads[i] = io;
            ioThreads[i].start();
        } catch (Exception e) {
            System.err.println("Error accepting connection from peer socket..");
            e.printStackTrace();
        }
    }

    // Loop to read port numbers from Wrapper.java processes
    // and to create WRAPPER_INFO (containing all IPs and ports)
    String WRAPPER_INFO = "#Peer Information";
    for (int i = np; i > 0; i--) {
        try {
            sock = infoSock.accept();
            DataOutputStream out = new DataOutputStream(sock.getOutputStream());
            DataInputStream in = new DataInputStream(sock.getInputStream());
            if (in.readUTF().startsWith("Sending Info")) {
                wport = in.readInt();
                rport = in.readInt();
                rank = in.readInt();
                peers[rank] = ";" + sock.getInetAddress().getHostAddress() + "@" + rport + "@" + wport + "@" + rank;
                socketList.add(sock);
            }
        } catch (Exception e) {
            System.err.println("[MPJYarnClient.java]: Error accepting" + " connection from peer socket!");
            e.printStackTrace();
        }
    }

    for (int i = 0; i < np; i++) {
        WRAPPER_INFO += peers[i];
    }

    // Loop to broadcast WRAPPER_INFO to all Wrappers
    for (int i = np; i > 0; i--) {
        try {
            sock = socketList.get(np - i);
            DataOutputStream out = new DataOutputStream(sock.getOutputStream());
            out.writeUTF(WRAPPER_INFO);
            out.flush();
            sock.close();
        } catch (Exception e) {
            System.err.println("[MPJYarnClient.java]: Error closing" + " connection from peer socket..");
            e.printStackTrace();
        }
    }

    try {
        infoSock.close();
    } catch (IOException exp) {
        exp.printStackTrace();
    }

    // wait for all IO Threads to complete
    for (int i = 0; i < (np + 1); i++) {
        ioThreads[i].join();
    }
    isRunning = true;

    System.out.println("\nApplication Statistics!");
    while (true) {
        appReport = yarnClient.getApplicationReport(appId);
        appState = appReport.getYarnApplicationState();
        fStatus = appReport.getFinalApplicationStatus();
        if (appState == YarnApplicationState.FINISHED) {
            isRunning = false;
            if (fStatus == FinalApplicationStatus.SUCCEEDED) {
                System.out.println("State: " + fStatus);
            } else {
                System.out.println("State: " + fStatus);
            }
            break;
        } else if (appState == YarnApplicationState.KILLED) {
            isRunning = false;
            System.out.println("State: " + appState);
            break;
        } else if (appState == YarnApplicationState.FAILED) {
            isRunning = false;
            System.out.println("State: " + appState);
            break;
        }
        Thread.sleep(100);
    }

    try {
        if (debugYarn) {
            logger.info("Cleaning the files from hdfs: ");
            logger.info("1) " + dest.toString());
            logger.info("2) " + wrapperDest.toString());
            logger.info("3) " + userJarDest.toString());
        }
        fs.delete(dest);
        fs.delete(wrapperDest);
        fs.delete(userJarDest);
    } catch (IOException exp) {
        exp.printStackTrace();
    }

    System.out.println("Application ID: " + appId + "\n" + "Application User: " + appReport.getUser() + "\n"
            + "RM Queue: " + appReport.getQueue() + "\n" + "Start Time: " + appReport.getStartTime() + "\n"
            + "Finish Time: " + appReport.getFinishTime());
}
From source file:org.apache.hadoop.hbase.util.FSUtils.java
/**
 * Verifies current version of file system
 *
 * @param fs filesystem object
 * @param rootdir root hbase directory
 * @return null if no version file exists, version string otherwise.
 * @throws IOException e
 * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
 */
public static String getVersion(FileSystem fs, Path rootdir) throws IOException, DeserializationException {
    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);
    FileStatus[] status = null;
    try {
        // hadoop 2.0 throws FNFE if directory does not exist.
        // hadoop 1.0 returns null if directory does not exist.
        status = fs.listStatus(versionFile);
    } catch (FileNotFoundException fnfe) {
        return null;
    }
    if (status == null || status.length == 0)
        return null;
    String version = null;
    byte[] content = new byte[(int) status[0].getLen()];
    FSDataInputStream s = fs.open(versionFile);
    try {
        IOUtils.readFully(s, content, 0, content.length);
        if (ProtobufUtil.isPBMagicPrefix(content)) {
            version = parseVersionFrom(content);
        } else {
            // Presume it pre-pb format.
            InputStream is = new ByteArrayInputStream(content);
            DataInputStream dis = new DataInputStream(is);
            try {
                version = dis.readUTF();
            } finally {
                dis.close();
            }
            // Update the format
            LOG.info("Updating the hbase.version file format with version=" + version);
            setVersion(fs, rootdir, version, 0, HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);
        }
    } catch (EOFException eof) {
        LOG.warn("Version file was empty, odd, will try to set it.");
    } finally {
        s.close();
    }
    return version;
}
From source file:org.sakaiproject.util.serialize.Type1BaseResourcePropertiesSerializer.java
/**
 * @see org.sakaiproject.entity.api.serialize.DataStreamEntitySerializer#parse(org.sakaiproject.entity.api.serialize.SerializableEntity,
 *      java.io.DataInputStream)
 */
public void parse(SerializableEntity se, DataInputStream ds) throws EntityParseException {
    if (!(se instanceof SerializablePropertiesAccess)) {
        throw new EntityParseException("Cant serialize " + se + " as it is not a SerializableProperties ");
    }
    SerializablePropertiesAccess sp = (SerializablePropertiesAccess) se;
    Map<String, Object> properties = new HashMap<String, Object>();
    try {
        int type = ds.readInt();
        if (type == TYPE1) {
            int block = ds.readInt();
            if (block == BLOCK1) {
                int nprops = ds.readInt();
                for (int i = 0; i < nprops; i++) {
                    block = ds.readInt();
                    switch (block) {
                    case BLOCK2: {
                        String key = ds.readUTF();
                        String value = ds.readUTF();
                        properties.put(key, value);
                    }
                        break;
                    case BLOCK3: {
                        String key = ds.readUTF();
                        int n = ds.readInt();
                        List<String> l = new Vector<String>();
                        for (int j = 0; j < n; j++) {
                            l.add(ds.readUTF());
                        }
                        properties.put(key, l);
                    }
                        break;
                    default:
                        throw new EntityParseException("Unrecognised block number " + block);
                    }
                }
                sp.setSerializableProperties(properties);
            } else {
                throw new EntityParseException("Failed to parse entity, unrecognised block " + block);
            }
        } else {
            throw new EntityParseException("Cant Parse block, resource properties is not type 1 " + type);
        }
    } catch (EntityParseException ep) {
        throw ep;
    } catch (Exception ex) {
        throw new EntityParseException("Failed to parse entity ", ex);
    }
}
From source file:PersistentRankingMIDlet.java
public int compare(byte[] book1, byte[] book2) {
    try {
        DataInputStream stream1 = new DataInputStream(new ByteArrayInputStream(book1));
        DataInputStream stream2 = new DataInputStream(new ByteArrayInputStream(book2));

        // Match based on the ISBN, but sort based on the title.
        String isbn1 = stream1.readUTF();
        String isbn2 = stream2.readUTF();
        if (isbn1.equals(isbn2)) {
            return RecordComparator.EQUIVALENT;
        }

        String title1 = stream1.readUTF();
        String title2 = stream2.readUTF();
        int result = title1.compareTo(title2);
        if (result == 0) {
            return RecordComparator.EQUIVALENT;
        }
        return result < 0 ? RecordComparator.PRECEDES : RecordComparator.FOLLOWS;
    } catch (IOException ex) {
        return RecordComparator.EQUIVALENT;
    }
}
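For context, a record in the layout this comparator expects could be built with the matching DataOutputStream calls. This is a minimal sketch; the helper name encodeBook is hypothetical and only the field order (ISBN first, then title) is taken from the comparator above.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Hypothetical helper: builds a record byte[] whose layout matches the comparator.
static byte[] encodeBook(String isbn, String title) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    out.writeUTF(isbn);   // read back first by compare()
    out.writeUTF(title);  // read back second, used for ordering
    out.flush();
    return baos.toByteArray();
}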
From source file:org.apache.pulsar.testclient.LoadSimulationClient.java
private void handle(final byte command, final DataInputStream inputStream, final DataOutputStream outputStream)
        throws Exception {
    final TradeConfiguration tradeConf = new TradeConfiguration();
    tradeConf.command = command;
    switch (command) {
    case CHANGE_COMMAND:
        // Change the topic's settings if it exists.
        decodeProducerOptions(tradeConf, inputStream);
        if (topicsToTradeUnits.containsKey(tradeConf.topic)) {
            topicsToTradeUnits.get(tradeConf.topic).change(tradeConf);
        }
        break;
    case STOP_COMMAND:
        // Stop the topic if it exists.
        tradeConf.topic = inputStream.readUTF();
        if (topicsToTradeUnits.containsKey(tradeConf.topic)) {
            topicsToTradeUnits.get(tradeConf.topic).stop.set(true);
        }
        break;
    case TRADE_COMMAND:
        // Create the topic. It is assumed that the topic does not already exist.
        decodeProducerOptions(tradeConf, inputStream);
        final TradeUnit tradeUnit = new TradeUnit(tradeConf, client, producerConf, consumerConf, payloadCache);
        topicsToTradeUnits.put(tradeConf.topic, tradeUnit);
        executor.submit(() -> {
            try {
                final String topic = tradeConf.topic;
                final String namespace = topic.substring("persistent://".length(), topic.lastIndexOf('/'));
                try {
                    admin.namespaces().createNamespace(namespace);
                } catch (PulsarAdminException.ConflictException e) {
                    // Ignore, already created namespace.
                }
                tradeUnit.start();
            } catch (Exception ex) {
                throw new RuntimeException(ex);
            }
        });
        break;
    case CHANGE_GROUP_COMMAND:
        // Change the settings of all topics belonging to a group.
        decodeGroupOptions(tradeConf, inputStream);
        tradeConf.size = inputStream.readInt();
        tradeConf.rate = inputStream.readDouble();
        // See if a topic belongs to this tenant and group using this regex.
        final String groupRegex = ".*://" + tradeConf.tenant + "/.*/" + tradeConf.group + "-.*/.*";
        for (Map.Entry<String, TradeUnit> entry : topicsToTradeUnits.entrySet()) {
            final String destination = entry.getKey();
            final TradeUnit unit = entry.getValue();
            if (destination.matches(groupRegex)) {
                unit.change(tradeConf);
            }
        }
        break;
    case STOP_GROUP_COMMAND:
        // Stop all topics belonging to a group.
        decodeGroupOptions(tradeConf, inputStream);
        // See if a topic belongs to this tenant and group using this regex.
        final String regex = ".*://" + tradeConf.tenant + "/.*/" + tradeConf.group + "-.*/.*";
        for (Map.Entry<String, TradeUnit> entry : topicsToTradeUnits.entrySet()) {
            final String destination = entry.getKey();
            final TradeUnit unit = entry.getValue();
            if (destination.matches(regex)) {
                unit.stop.set(true);
            }
        }
        break;
    case FIND_COMMAND:
        // Write a single boolean indicating if the topic was found.
        outputStream.writeBoolean(topicsToTradeUnits.containsKey(inputStream.readUTF()));
        outputStream.flush();
        break;
    default:
        throw new IllegalArgumentException("Unrecognized command code received: " + command);
    }
}
From source file:ch.unil.genescore.vegas.Snp.java
public void readPosAndMinorAllele(DataInputStream is) throws IOException, DataInconsistencyException {
    //TODO: Only solves homegrown case atm;
    // @David I changed this to an IllegalArgumentException because the other one was unknown on my system
    // id_ is already read
    //String curChr = chr_;
    //int curStart = start_;
    //int curEnd = end_;
    chr_ = is.readUTF();
    start_ = is.readInt();
    end_ = is.readInt();
    //if (curChr != null || curStart != -1 || curEnd != -1){
    //    if (!chr_.equals(curChr) || start_ != curStart || end_ != curEnd){
    //        throw new RuntimeException("snp seems to have been set before to another value");
    //    }
    //}
    posStrand_ = is.readBoolean();
    minorAllele_ = is.readChar();
}
From source file:ApplicationMaster.java
private String readContent(String filePath) throws IOException {
    DataInputStream ds = null;
    try {
        ds = new DataInputStream(new FileInputStream(filePath));
        return ds.readUTF();
    } finally {
        org.apache.commons.io.IOUtils.closeQuietly(ds);
    }
}
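The file read here must begin with a record written by DataOutput.writeUTF; otherwise readUTF throws UTFDataFormatException or EOFException. A minimal sketch of a matching writer follows; the name writeContent is hypothetical and not part of the original ApplicationMaster.

import java.io.DataOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;

// Hypothetical counterpart to readContent(): writes a single modified-UTF-8
// record so that DataInputStream.readUTF() can read it back.
private static void writeContent(String filePath, String content) throws IOException {
    try (DataOutputStream out = new DataOutputStream(new FileOutputStream(filePath))) {
        out.writeUTF(content); // limited to strings whose encoded form fits in 65535 bytes
    }
}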
From source file:ch.unil.genescore.vegas.Snp.java
public void readPosAndAllele(DataInputStream is) throws IOException, DataInconsistencyException {
    //TODO: Only solves homegrown case atm;
    // @David I changed this to an IllegalArgumentException because the other one was unknown on my system
    // id_ is already read
    //String curChr = chr_;
    //int curStart = start_;
    //int curEnd = end_;
    chr_ = is.readUTF();
    start_ = is.readInt();
    end_ = is.readInt();
    //if (curChr != null || curStart != -1 || curEnd != -1){
    //    if (!chr_.equals(curChr) || start_ != curStart || end_ != curEnd){
    //        throw new RuntimeException("snp seems to have been set before to another value");
    //    }
    //}
    posStrand_ = is.readBoolean();
    char minorAllele = is.readChar();

    boolean snpHasBeenSeenInGWAS = false;
    if (minorAllele_ != 'N' || majorAllele_ != 'N') {
        snpHasBeenSeenInGWAS = true;
    }
    if (Pascal.set.withZScore_ && minorAllele_ != minorAllele) {
        if (minorAllele != majorAllele_ && snpHasBeenSeenInGWAS) {
            throw new DataInconsistencyException(
                    "different minor allele of reference population not found GWAS data. Snp left out.");
        }
        zscore_ *= -1;
        char minorAlleleSummaryFile = minorAllele_;
        char majorAlleleSummaryFile = majorAllele_;
        minorAllele_ = majorAlleleSummaryFile;
        majorAllele_ = minorAlleleSummaryFile;
    }
}
From source file:hudson.cli.CLI.java
/**
 * @deprecated Specific to {@link Mode#REMOTING}.
 */
@Deprecated
private Channel connectViaCliPort(URL jenkins, CliPort clip) throws IOException {
    LOGGER.log(FINE, "Trying to connect directly via Remoting over TCP/IP to {0}", clip.endpoint);

    if (authorization != null) {
        LOGGER.warning("-auth ignored when using JNLP agent port");
    }

    final Socket s = new Socket();
    // this prevents a connection from silently terminated by the router in between or the other peer
    // and that goes without unnoticed. However, the time out is often very long (for example 2 hours
    // by default in Linux) that this alone is enough to prevent that.
    s.setKeepAlive(true);
    // we take care of buffering on our own
    s.setTcpNoDelay(true);
    OutputStream out;

    if (httpsProxyTunnel != null) {
        String[] tokens = httpsProxyTunnel.split(":");
        LOGGER.log(Level.FINE, "Using HTTP proxy {0}:{1} to connect to CLI port",
                new Object[] { tokens[0], tokens[1] });
        s.connect(new InetSocketAddress(tokens[0], Integer.parseInt(tokens[1])));
        PrintStream o = new PrintStream(s.getOutputStream());
        o.print("CONNECT " + clip.endpoint.getHostString() + ":" + clip.endpoint.getPort() + " HTTP/1.0\r\n\r\n");

        // read the response from the proxy
        ByteArrayOutputStream rsp = new ByteArrayOutputStream();
        while (!rsp.toString("ISO-8859-1").endsWith("\r\n\r\n")) {
            int ch = s.getInputStream().read();
            if (ch < 0)
                throw new IOException("Failed to read the HTTP proxy response: " + rsp);
            rsp.write(ch);
        }
        String head = new BufferedReader(new StringReader(rsp.toString("ISO-8859-1"))).readLine();

        if (head == null) {
            throw new IOException("Unexpected empty response");
        }
        if (!(head.startsWith("HTTP/1.0 200 ") || head.startsWith("HTTP/1.1 200 "))) {
            s.close();
            LOGGER.log(Level.SEVERE,
                    "Failed to tunnel the CLI port through the HTTP proxy. Falling back to HTTP.");
            throw new IOException("Failed to establish a connection through HTTP proxy: " + rsp);
        }

        // HTTP proxies (at least the one I tried --- squid) doesn't seem to do half-close very well.
        // So instead of relying on it, we'll just send the close command and then let the server
        // cut their side, then close the socket after the join.
        out = new SocketOutputStream(s) {
            @Override
            public void close() throws IOException {
                // ignore
            }
        };
    } else {
        s.connect(clip.endpoint, 3000);
        out = SocketChannelStream.out(s);
    }

    closables.add(new Closeable() {
        public void close() throws IOException {
            s.close();
        }
    });

    Connection c = new Connection(SocketChannelStream.in(s), out);

    switch (clip.version) {
    case 1:
        DataOutputStream dos = new DataOutputStream(s.getOutputStream());
        dos.writeUTF("Protocol:CLI-connect");
        // we aren't checking greeting from the server here because I'm too lazy. It gets ignored by Channel constructor.
        break;
    case 2:
        DataInputStream dis = new DataInputStream(s.getInputStream());
        dos = new DataOutputStream(s.getOutputStream());
        dos.writeUTF("Protocol:CLI2-connect");
        String greeting = dis.readUTF();
        if (!greeting.equals("Welcome"))
            throw new IOException("Handshaking failed: " + greeting);
        try {
            byte[] secret = c.diffieHellman(false).generateSecret();
            SecretKey sessionKey = new SecretKeySpec(Connection.fold(secret, 128 / 8), "AES");
            c = c.encryptConnection(sessionKey, "AES/CFB8/NoPadding");

            // validate the instance identity, so that we can be sure that we are talking to the same server
            // and there's no one in the middle.
            byte[] signature = c.readByteArray();

            if (clip.identity != null) {
                Signature verifier = Signature.getInstance("SHA1withRSA");
                verifier.initVerify(clip.getIdentity());
                verifier.update(secret);
                if (!verifier.verify(signature))
                    throw new IOException("Server identity signature validation failed.");
            }
        } catch (GeneralSecurityException e) {
            throw (IOException) new IOException("Failed to negotiate transport security").initCause(e);
        }
    }

    return new Channel("CLI connection to " + jenkins, pool, new BufferedInputStream(c.in),
            new BufferedOutputStream(c.out));
}
From source file:com.chaosinmotion.securechat.messages.SCMessageQueue.java
/**
 * Process a data packet from the back end notification service. A data
 * packet response from the back end has the format:
 *
 * first byte
 *     0x20 Message
 *     0x21 Token response
 *     0x22 Login failure
 *
 * Note login success is implicit; if login worked, we start receiving
 * message notifications, starting with the backlog of stored messages
 * waiting for us
 */
private void processDataPacket(byte[] data) {
    if (data.length == 0)
        return;

    if (data[0] == 0x20) {
        /*
         * Process received message.
         */
        ByteArrayInputStream bais = new ByteArrayInputStream(data, 1, data.length - 1);
        DataInputStream dis = new DataInputStream(bais);
        try {
            boolean toflag = dis.readBoolean();
            int messageID = dis.readInt();
            int senderID = dis.readInt();
            String ts = dis.readUTF();
            String senderName = dis.readUTF();
            int messagelen = dis.readInt();
            byte[] message = new byte[messagelen];
            dis.readFully(message); // readFully guarantees the whole payload is read
            dis.close();

            insertMessage(senderID, senderName, toflag, messageID, DateUtils.parseServerDate(ts), message);
        } catch (IOException e) {
            e.printStackTrace();
        }
    } else if (data[0] == 0x21) {
        /*
         * Received token; rest is string
         */
        try {
            String token = new String(data, 1, data.length - 1, "UTF-8");
            loginPhaseTwo(token);
        } catch (UnsupportedEncodingException e) {
            // Should never happen
        }
    } else if (data[0] == 0x22) {
        /*
         * Login failure. Close connection and start polling
         */
        closeConnection();
        startPolling("Login failure");
    }
}
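For reference, a 0x20 message packet in the layout this reader expects could be produced with the mirror-image DataOutputStream calls. This is a hypothetical sketch derived only from the read order above; the buildMessagePacket name is ours, not part of SecureChat.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Hypothetical sketch: builds a 0x20 "message" packet in the byte order
// that processDataPacket() reads it back.
static byte[] buildMessagePacket(boolean toflag, int messageID, int senderID,
        String timestamp, String senderName, byte[] message) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    dos.writeByte(0x20);        // packet type: message
    dos.writeBoolean(toflag);
    dos.writeInt(messageID);
    dos.writeInt(senderID);
    dos.writeUTF(timestamp);    // read back with readUTF()
    dos.writeUTF(senderName);   // read back with readUTF()
    dos.writeInt(message.length);
    dos.write(message);
    dos.flush();
    return baos.toByteArray();
}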