List of usage examples for java.io.DataInputStream.readInt

public final int readInt() throws IOException

readInt reads four input bytes and returns the int value they encode, high byte first (big-endian). The method is declared by the java.io.DataInput interface and implemented by DataInputStream.
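Because readInt consumes exactly four bytes and pairs with DataOutputStream.writeInt, a minimal round-trip sketch looks like this:

import java.io.*;

public class ReadIntRoundTrip {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buf)) {
            out.writeInt(42);                 // writes 4 bytes: 00 00 00 2A
            out.writeInt(-1);                 // writes FF FF FF FF
        }
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()))) {
            System.out.println(in.readInt()); // 42
            System.out.println(in.readInt()); // -1
            // a third readInt() here would throw EOFException: fewer than 4 bytes remain
        }
    }
}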
From source file:com.chaosinmotion.securechat.messages.SCMessageQueue.java
/**
 * Process a data packet from the back end notification service. A data
 * packet response from the back end has the format:
 *
 * first byte
 *   0x20 Message
 *   0x21 Token response
 *   0x22 Login failure
 *
 * Note login success is implicit; if login worked, we start receiving
 * message notifications, starting with the backlog of stored messages
 * waiting for us
 */
private void processDataPacket(byte[] data) {
    if (data.length == 0)
        return;

    if (data[0] == 0x20) {
        /*
         * Process received message.
         */
        ByteArrayInputStream bais = new ByteArrayInputStream(data, 1, data.length - 1);
        DataInputStream dis = new DataInputStream(bais);
        try {
            boolean toflag = dis.readBoolean();
            int messageID = dis.readInt();
            int senderID = dis.readInt();
            String ts = dis.readUTF();
            String senderName = dis.readUTF();
            int messagelen = dis.readInt();
            byte[] message = new byte[messagelen];
            dis.read(message);
            dis.close();

            insertMessage(senderID, senderName, toflag, messageID, DateUtils.parseServerDate(ts), message);
        } catch (IOException e) {
            e.printStackTrace();
        }
    } else if (data[0] == 0x21) {
        /*
         * Received token; rest is string
         */
        try {
            String token = new String(data, 1, data.length - 1, "UTF-8");
            loginPhaseTwo(token);
        } catch (UnsupportedEncodingException e) {
            // Should never happen
        }
    } else if (data[0] == 0x22) {
        /*
         * Login failure. Close connection and start polling
         */
        closeConnection();
        startPolling("Login failure");
    }
}
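In this example readInt supplies a length prefix for the message payload that follows. Note that InputStream.read(byte[]) is not guaranteed to fill the buffer; DataInputStream.readFully is the DataInput method that either fills it or throws EOFException. A minimal sketch of the same length-prefixed read using readFully (class, method, and field layout here are illustrative, not taken from the original project):

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

public class LengthPrefixedRead {
    // Reads: boolean flag, int id, UTF timestamp, then an int length followed by that many bytes.
    static byte[] readPayload(byte[] packet) throws IOException {
        try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(packet))) {
            boolean flag = dis.readBoolean();
            int id = dis.readInt();          // 4-byte big-endian field
            String timestamp = dis.readUTF();
            int len = dis.readInt();         // length prefix written by the sender
            byte[] payload = new byte[len];
            dis.readFully(payload);          // unlike read(), blocks until all len bytes are read
            return payload;
        }
    }
}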
From source file:runtime.starter.MPJYarnClient.java
public void run() throws Exception {

    Map<String, String> map = System.getenv();
    try {
        mpjHomeDir = map.get("MPJ_HOME");
        if (mpjHomeDir == null) {
            throw new Exception("[MPJRun.java]:MPJ_HOME environment not found..");
        }
    } catch (Exception exc) {
        System.out.println("[MPJRun.java]:" + exc.getMessage());
        exc.printStackTrace();
        return;
    }

    // Copy the application master jar to HDFS
    // Create a local resource to point to the destination jar path
    FileSystem fs = FileSystem.get(conf);
    /*
    Path dataset = new Path(fs.getHomeDirectory(), "/dataset");
    FileStatus datasetFile = fs.getFileStatus(dataset);
    BlockLocation myBlocks[] = fs.getFileBlockLocations(datasetFile, 0, datasetFile.getLen());
    for (BlockLocation b : myBlocks) {
        System.out.println("\n--------------------");
        System.out.println("Length " + b.getLength());
        for (String host : b.getHosts()) {
            System.out.println("host " + host);
        }
    }
    */
    Path source = new Path(mpjHomeDir + "/lib/mpj-app-master.jar");
    String pathSuffix = hdfsFolder + "mpj-app-master.jar";
    Path dest = new Path(fs.getHomeDirectory(), pathSuffix);

    if (debugYarn) {
        logger.info("Uploading mpj-app-master.jar to: " + dest.toString());
    }

    fs.copyFromLocalFile(false, true, source, dest);
    FileStatus destStatus = fs.getFileStatus(dest);

    Path wrapperSource = new Path(mpjHomeDir + "/lib/mpj-yarn-wrapper.jar");
    String wrapperSuffix = hdfsFolder + "mpj-yarn-wrapper.jar";
    Path wrapperDest = new Path(fs.getHomeDirectory(), wrapperSuffix);

    if (debugYarn) {
        logger.info("Uploading mpj-yarn-wrapper.jar to: " + wrapperDest.toString());
    }

    fs.copyFromLocalFile(false, true, wrapperSource, wrapperDest);

    Path userJar = new Path(jarPath);
    String userJarSuffix = hdfsFolder + "user-code.jar";
    Path userJarDest = new Path(fs.getHomeDirectory(), userJarSuffix);

    if (debugYarn) {
        logger.info("Uploading user-code.jar to: " + userJarDest.toString());
    }

    fs.copyFromLocalFile(false, true, userJar, userJarDest);

    YarnConfiguration conf = new YarnConfiguration();
    YarnClient yarnClient = YarnClient.createYarnClient();
    yarnClient.init(conf);
    yarnClient.start();

    if (debugYarn) {
        YarnClusterMetrics metrics = yarnClient.getYarnClusterMetrics();
        logger.info("\nNodes Information");
        logger.info("Number of NM: " + metrics.getNumNodeManagers() + "\n");

        List<NodeReport> nodeReports = yarnClient.getNodeReports(NodeState.RUNNING);
        for (NodeReport n : nodeReports) {
            logger.info("NodeId: " + n.getNodeId());
            logger.info("RackName: " + n.getRackName());
            logger.info("Total Memory: " + n.getCapability().getMemory());
            logger.info("Used Memory: " + n.getUsed().getMemory());
            logger.info("Total vCores: " + n.getCapability().getVirtualCores());
            logger.info("Used vCores: " + n.getUsed().getVirtualCores() + "\n");
        }
    }

    logger.info("Creating server socket at HOST " + serverName + " PORT " + serverPort
            + " \nWaiting for " + np + " processes to connect...");

    // Creating a server socket for incoming connections
    try {
        servSock = new ServerSocket(serverPort);
        infoSock = new ServerSocket();
        TEMP_PORT = findPort(infoSock);
    } catch (Exception e) {
        e.printStackTrace();
    }

    // Create application via yarnClient
    YarnClientApplication app = yarnClient.createApplication();
    GetNewApplicationResponse appResponse = app.getNewApplicationResponse();

    int maxMem = appResponse.getMaximumResourceCapability().getMemory();

    if (debugYarn) {
        logger.info("Max memory capability resources in cluster: " + maxMem);
    }

    if (amMem > maxMem) {
        amMem = maxMem;
        logger.info("AM memory specified above threshold of cluster "
                + "Using maximum memory for AM container: " + amMem);
    }

    int maxVcores = appResponse.getMaximumResourceCapability().getVirtualCores();

    if (debugYarn) {
        logger.info("Max vCores capability resources in cluster: " + maxVcores);
    }

    if (amCores > maxVcores) {
        amCores = maxVcores;
        logger.info("AM virtual cores specified above threshold of cluster "
                + "Using maximum virtual cores for AM container: " + amCores);
    }

    // Set up the container launch context for the application master
    ContainerLaunchContext amContainer = Records.newRecord(ContainerLaunchContext.class);

    List<String> commands = new ArrayList<String>();
    commands.add("$JAVA_HOME/bin/java");
    commands.add("-Xmx" + amMem + "m");
    commands.add("runtime.starter.MPJAppMaster");
    commands.add("--np");
    commands.add(String.valueOf(np));
    commands.add("--serverName");
    commands.add(serverName);                   // server name
    commands.add("--ioServerPort");
    commands.add(Integer.toString(serverPort)); // server port
    commands.add("--deviceName");
    commands.add(deviceName);                   // device name
    commands.add("--className");
    commands.add(className);                    // class name
    commands.add("--wdir");
    commands.add(workingDirectory);             // wdir
    commands.add("--psl");
    commands.add(Integer.toString(psl));        // protocol switch limit
    commands.add("--wireUpPort");
    commands.add(String.valueOf(TEMP_PORT));    // for sharing ports & rank
    commands.add("--wrapperPath");
    commands.add(wrapperDest.toString());       // MPJYarnWrapper.jar HDFS path
    commands.add("--userJarPath");
    commands.add(userJarDest.toString());       // User Jar File HDFS path
    commands.add("--mpjContainerPriority");
    commands.add(mpjContainerPriority);         // priority for mpj containers
    commands.add("--containerMem");
    commands.add(containerMem);
    commands.add("--containerCores");
    commands.add(containerCores);

    if (debugYarn) {
        commands.add("--debugYarn");
    }

    if (appArgs != null) {
        commands.add("--appArgs");
        for (int i = 0; i < appArgs.length; i++) {
            commands.add(appArgs[i]);
        }
    }

    amContainer.setCommands(commands); // set commands

    // Setup local Resource for ApplicationMaster
    LocalResource appMasterJar = Records.newRecord(LocalResource.class);
    appMasterJar.setResource(ConverterUtils.getYarnUrlFromPath(dest));
    appMasterJar.setSize(destStatus.getLen());
    appMasterJar.setTimestamp(destStatus.getModificationTime());
    appMasterJar.setType(LocalResourceType.ARCHIVE);
    appMasterJar.setVisibility(LocalResourceVisibility.APPLICATION);

    amContainer.setLocalResources(Collections.singletonMap("mpj-app-master.jar", appMasterJar));

    // Setup CLASSPATH for ApplicationMaster
    // Setting up the environment
    Map<String, String> appMasterEnv = new HashMap<String, String>();
    setupAppMasterEnv(appMasterEnv);
    amContainer.setEnvironment(appMasterEnv);

    // Set up resource type requirements for ApplicationMaster
    Resource capability = Records.newRecord(Resource.class);
    capability.setMemory(amMem);
    capability.setVirtualCores(amCores);

    // Finally, set-up ApplicationSubmissionContext for the application
    ApplicationSubmissionContext appContext = app.getApplicationSubmissionContext();
    appContext.setApplicationName(appName);
    appContext.setAMContainerSpec(amContainer);
    appContext.setResource(capability);
    appContext.setQueue(yarnQueue); // queue

    Priority priority = Priority.newInstance(amPriority);
    appContext.setPriority(priority);

    ApplicationId appId = appContext.getApplicationId();

    // Adding ShutDown Hook
    Runtime.getRuntime().addShutdownHook(new KillYarnApp(appId, yarnClient));

    // Submit application
    System.out.println("Submitting Application: " + appContext.getApplicationName() + "\n");

    try {
        isRunning = true;
        yarnClient.submitApplication(appContext);
    } catch (Exception exp) {
        System.err.println("Error Submitting Application");
        exp.printStackTrace();
    }

    // np = number of processes, + 1 for Application Master container
    IOMessagesThread[] ioThreads = new IOMessagesThread[np + 1];

    peers = new String[np];
    socketList = new Vector<Socket>();

    int wport = 0;
    int rport = 0;
    int rank = 0;

    // np + 1 IOThreads
    for (int i = 0; i < (np + 1); i++) {
        try {
            sock = servSock.accept();
            // start IO thread to read STDOUT and STDERR from wrappers
            IOMessagesThread io = new IOMessagesThread(sock);
            ioThreads[i] = io;
            ioThreads[i].start();
        } catch (Exception e) {
            System.err.println("Error accepting connection from peer socket..");
            e.printStackTrace();
        }
    }

    // Loop to read port numbers from Wrapper.java processes
    // and to create WRAPPER_INFO (containing all IPs and ports)
    String WRAPPER_INFO = "#Peer Information";
    for (int i = np; i > 0; i--) {
        try {
            sock = infoSock.accept();

            DataOutputStream out = new DataOutputStream(sock.getOutputStream());
            DataInputStream in = new DataInputStream(sock.getInputStream());

            if (in.readUTF().startsWith("Sending Info")) {
                wport = in.readInt();
                rport = in.readInt();
                rank = in.readInt();

                peers[rank] = ";" + sock.getInetAddress().getHostAddress() + "@" + rport + "@" + wport + "@" + rank;
                socketList.add(sock);
            }
        } catch (Exception e) {
            System.err.println("[MPJYarnClient.java]: Error accepting" + " connection from peer socket!");
            e.printStackTrace();
        }
    }

    for (int i = 0; i < np; i++) {
        WRAPPER_INFO += peers[i];
    }

    // Loop to broadcast WRAPPER_INFO to all Wrappers
    for (int i = np; i > 0; i--) {
        try {
            sock = socketList.get(np - i);
            DataOutputStream out = new DataOutputStream(sock.getOutputStream());

            out.writeUTF(WRAPPER_INFO);
            out.flush();

            sock.close();
        } catch (Exception e) {
            System.err.println("[MPJYarnClient.java]: Error closing" + " connection from peer socket..");
            e.printStackTrace();
        }
    }

    try {
        infoSock.close();
    } catch (IOException exp) {
        exp.printStackTrace();
    }

    // wait for all IO Threads to complete
    for (int i = 0; i < (np + 1); i++) {
        ioThreads[i].join();
    }

    isRunning = true;

    System.out.println("\nApplication Statistics!");
    while (true) {
        appReport = yarnClient.getApplicationReport(appId);
        appState = appReport.getYarnApplicationState();
        fStatus = appReport.getFinalApplicationStatus();
        if (appState == YarnApplicationState.FINISHED) {
            isRunning = false;
            if (fStatus == FinalApplicationStatus.SUCCEEDED) {
                System.out.println("State: " + fStatus);
            } else {
                System.out.println("State: " + fStatus);
            }
            break;
        } else if (appState == YarnApplicationState.KILLED) {
            isRunning = false;
            System.out.println("State: " + appState);
            break;
        } else if (appState == YarnApplicationState.FAILED) {
            isRunning = false;
            System.out.println("State: " + appState);
            break;
        }
        Thread.sleep(100);
    }

    try {
        if (debugYarn) {
            logger.info("Cleaning the files from hdfs: ");
            logger.info("1) " + dest.toString());
            logger.info("2) " + wrapperDest.toString());
            logger.info("3) " + userJarDest.toString());
        }
        fs.delete(dest);
        fs.delete(wrapperDest);
        fs.delete(userJarDest);
    } catch (IOException exp) {
        exp.printStackTrace();
    }

    System.out.println("Application ID: " + appId + "\n" + "Application User: " + appReport.getUser() + "\n"
            + "RM Queue: " + appReport.getQueue() + "\n" + "Start Time: " + appReport.getStartTime() + "\n"
            + "Finish Time: " + appReport.getFinishTime());
}
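The readInt calls in the wire-up loop above consume a small handshake: each wrapper announces itself with a UTF marker followed by three ints (write port, read port, rank). A minimal sketch of the sending side, under the assumption that the wrapper simply mirrors the reads shown above (class and parameter names are illustrative, not from the MPJ source):

import java.io.DataOutputStream;
import java.io.IOException;
import java.net.Socket;

public class WrapperHandshakeSketch {
    // Connects to the client's info socket and sends: UTF marker, then three ints.
    static void sendInfo(String host, int infoPort, int wport, int rport, int rank) throws IOException {
        try (Socket sock = new Socket(host, infoPort);
             DataOutputStream out = new DataOutputStream(sock.getOutputStream())) {
            out.writeUTF("Sending Info"); // matched by in.readUTF().startsWith("Sending Info")
            out.writeInt(wport);          // consumed by the first in.readInt()
            out.writeInt(rport);          // consumed by the second in.readInt()
            out.writeInt(rank);           // consumed by the third in.readInt()
            out.flush();
        }
    }
}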
From source file:IntSort.java
public void readStream() {
    try {
        // Careful: Make sure this is big enough!
        // Better yet, test and reallocate if necessary
        byte[] recData = new byte[50];

        // Read from the specified byte array
        ByteArrayInputStream strmBytes = new ByteArrayInputStream(recData);

        // Read Java data types from the above byte array
        DataInputStream strmDataType = new DataInputStream(strmBytes);

        if (rs.getNumRecords() > 0) {
            ComparatorInt comp = new ComparatorInt();
            int i = 1;

            RecordEnumeration re = rs.enumerateRecords(null, comp, false);
            while (re.hasNextElement()) {
                // Get data into the byte array
                rs.getRecord(re.nextRecordId(), recData, 0);

                // Read back the data types
                System.out.println("Record #" + i++);
                System.out.println("Name: " + strmDataType.readUTF());
                System.out.println("Dog: " + strmDataType.readBoolean());
                System.out.println("Rank: " + strmDataType.readInt());
                System.out.println("--------------------");

                // Reset so read starts at beginning of array
                strmBytes.reset();
            }

            comp.compareIntClose();

            // Free enumerator
            re.destroy();
        }

        strmBytes.close();
        strmDataType.close();
    } catch (Exception e) {
        db(e.toString());
    }
}
From source file:org.commoncrawl.service.crawlhistory.CrawlHistoryServer.java
private boolean validateOnDiskVersion() throws IOException {
    FileSystem fs = CrawlEnvironment.getDefaultFileSystem();
    Path dataFilePath = getDataFileFinalPath();
    LOG.info("Loading BloomFilter From Disk at Path:" + dataFilePath);
    if (fs.exists(dataFilePath)) {
        FSDataInputStream stream = null;
        try {
            stream = fs.open(dataFilePath);
            DataInputStream dataInput = new DataInputStream(stream);
            // skip version
            dataInput.readInt();
            // read crawl version ...
            int serializedCrawlVersion = dataInput.readInt();
            LOG.info("BloomFilter From On Disk has CrawlVersion:" + serializedCrawlVersion);
            if (serializedCrawlVersion < _state.getCurrentCrawlNumber()) {
                LOG.error("skipping load because serial crawl number is less than current crawl");
                stream.close();
                stream = null;
                fs.rename(dataFilePath,
                        new Path(dataFilePath.getParent(), dataFilePath.getName() + "-V-" + serializedCrawlVersion));
                return false;
            }
            return true;
        } finally {
            if (stream != null)
                stream.close();
        }
    }
    return false;
}
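Here readInt is used twice as a small fixed header (a format version followed by a crawl number) before the bulk of the file. A minimal sketch of writing and reading such an int header on a plain local file (file and helper names are illustrative, not the project's own format):

import java.io.*;

public class IntHeaderSketch {
    static void writeHeader(File f, int formatVersion, int crawlNumber) throws IOException {
        try (DataOutputStream out = new DataOutputStream(new FileOutputStream(f))) {
            out.writeInt(formatVersion); // 4 bytes, big-endian
            out.writeInt(crawlNumber);
            // ... payload would follow here
        }
    }

    static int readCrawlNumber(File f) throws IOException {
        try (DataInputStream in = new DataInputStream(new FileInputStream(f))) {
            in.readInt();        // skip the format version, as the example above does
            return in.readInt(); // the value the caller actually compares against
        }
    }
}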
From source file:openaf.AFCmdOS.java
/**
 * @param in
 * @param out
 * @param appoperation2
 * @throws Exception
 */
protected com.google.gson.JsonObject execute(com.google.gson.JsonObject pmIn, String op, boolean processScript,
        StringBuilder theInput, boolean isolatePMs) throws Exception {
    // 3. Process input
    // INPUT_TYPE = inputtype.INPUT_SCRIPT;
    if (((!pipe) && (!filescript) && (!processScript) && (!injectcode) && (!injectclass))) {
        if (Desktop.isDesktopSupported() && Desktop.getDesktop().isSupported(Desktop.Action.BROWSE)) {
            /*injectscript = true;
            injectscriptfile = "/js/openafgui.js";
            filescript = true;*/
            injectclass = true;
            injectclassfile = "openafgui_js";
            filescript = false;
            silentMode = true;
        }
    }

    if (processScript || filescript || injectcode || injectclass) {
        // Obtain script
        String script = null;
        if (filescript) {
            if (injectscript) {
                script = IOUtils.toString(getClass().getResourceAsStream(injectscriptfile), "UTF-8");
            } else {
                boolean isZip = false;
                boolean isOpack = false;
                com.google.gson.JsonObject pm = null;
                ZipFile tmpZip = null;

                // Determine if it's opack/zip
                DataInputStream dis = new DataInputStream(
                        new BufferedInputStream(new FileInputStream(scriptfile.replaceFirst("::[^:]+$", ""))));
                int test = dis.readInt();
                dis.close();

                if (test == 0x504b0304) {
                    isZip = true;
                    try {
                        tmpZip = new ZipFile(scriptfile.replaceFirst("::[^:]+$", ""));
                        isOpack = tmpZip.getEntry(OPACK) != null;
                        zip = tmpZip;
                    } catch (Exception e) {
                    }

                    if (isOpack) {
                        if (scriptfile.indexOf("::") <= 0) {
                            pm = new Gson().fromJson(
                                    IOUtils.toString(zip.getInputStream(zip.getEntry(OPACK)), (Charset) null),
                                    JsonObject.class);
                            try {
                                pm.get("main");
                            } catch (Exception e) {
                                isZip = false;
                            }
                        }
                    }
                }

                // Read normal script or opack/zip
                if (isZip) {
                    if (scriptfile.indexOf("::") <= 0 && isOpack) {
                        if (pm.get("main").getAsString().length() > 0) {
                            script = IOUtils.toString(
                                    zip.getInputStream(zip.getEntry(pm.get("main").getAsString())), "UTF-8");
                            scriptfile = scriptfile + "/" + pm.get("main").getAsString();
                        } else {
                            throw new Exception("Can't execute main script in " + scriptfile);
                        }
                    } else {
                        try {
                            script = IOUtils.toString(
                                    zip.getInputStream(zip.getEntry(scriptfile.replaceFirst(".+::", ""))),
                                    "UTF-8");
                        } catch (NullPointerException e) {
                            throw new Exception("Can't find " + scriptfile.replaceFirst(".+::", ""));
                        }
                    }
                } else {
                    script = FileUtils.readFileToString(new File(scriptfile), (Charset) null);
                    zip = null;
                }
            }
        } else {
            if (!injectclass)
                script = theInput.toString();
        }

        if (script != null) {
            script = script.replaceAll("^#.*", "//");
            script = script.replaceFirst(PREFIX_SCRIPT, "");
            if (daemon)
                script = "ow.loadServer().simpleCheckIn('" + scriptfile + "'); " + script
                        + "; ow.loadServer().daemon();";
            if (injectcode)
                script += code;
        }

        Context cx = (Context) jse.getNotSafeContext();
        cx.setErrorReporter(new OpenRhinoErrorReporter());
        String includeScript = "";

        NativeObject jsonPMOut = new NativeObject();

        synchronized (this) {
            Object opmIn;
            opmIn = AFBase.jsonParse(pmIn.toString());

            Object noSLF4JErrorOnly = Context.javaToJS(__noSLF4JErrorOnly, (Scriptable) jse.getGlobalscope());
            ScriptableObject.putProperty((Scriptable) jse.getGlobalscope(), "__noSLF4JErrorOnly", noSLF4JErrorOnly);

            ScriptableObject.putProperty((Scriptable) jse.getGlobalscope(), "pmIn", opmIn);
            ScriptableObject.putProperty((Scriptable) jse.getGlobalscope(), "__pmIn", opmIn);

            // Add pmOut object
            Object opmOut = Context.javaToJS(jsonPMOut, (Scriptable) jse.getGlobalscope());
            ScriptableObject.putProperty((Scriptable) jse.getGlobalscope(), "pmOut", opmOut);
            ScriptableObject.putProperty((Scriptable) jse.getGlobalscope(), "__pmOut", opmOut);

            // Add expr object
            Object opmExpr = Context.javaToJS(exprInput, (Scriptable) jse.getGlobalscope());
            ScriptableObject.putProperty((Scriptable) jse.getGlobalscope(), "expr", opmExpr);
            ScriptableObject.putProperty((Scriptable) jse.getGlobalscope(), "__expr", opmExpr);
            ScriptableObject.putProperty((Scriptable) jse.getGlobalscope(), "__args", args);

            // Add scriptfile object
            if (filescript) {
                Object scriptFile = Context.javaToJS(scriptfile, (Scriptable) jse.getGlobalscope());
                ScriptableObject.putProperty((Scriptable) jse.getGlobalscope(), "__scriptfile", scriptFile);
                ScriptableObject.putProperty((Scriptable) jse.getGlobalscope(), "__iszip",
                        (zip == null) ? false : true);
            }

            // Add AF class
            ScriptableObject.defineClass((Scriptable) jse.getGlobalscope(), AFBase.class, false, true);
            // Add DB class
            ScriptableObject.defineClass((Scriptable) jse.getGlobalscope(), DB.class, false, true);
            // Add CSV class
            ScriptableObject.defineClass((Scriptable) jse.getGlobalscope(), CSV.class, false, true);
            // Add IO class
            ScriptableObject.defineClass((Scriptable) jse.getGlobalscope(), IOBase.class, false, true);

            // Add this object
            Scriptable afScript = null;
            //if (!ScriptableObject.hasProperty((Scriptable) jse.getGlobalscope(), "AF")) {
            afScript = (Scriptable) jse.newObject((Scriptable) jse.getGlobalscope(), "AF");
            //}

            if (!ScriptableObject.hasProperty((Scriptable) jse.getGlobalscope(), "af"))
                ((IdScriptableObject) jse.getGlobalscope()).put("af", (Scriptable) jse.getGlobalscope(), afScript);

            // Add the IO object
            if (!ScriptableObject.hasProperty((Scriptable) jse.getGlobalscope(), "io"))
                ((IdScriptableObject) jse.getGlobalscope()).put("io", (Scriptable) jse.getGlobalscope(),
                        jse.newObject(jse.getGlobalscope(), "IO"));
        }

        // Compile & execute script
        /*try {
            InputStream in1 = getClass().getResourceAsStream("/js/openaf.js");
            includeScript = IOUtils.toString(in1, (Charset) null);
            numberOfIncludedLines = numberOfIncludedLines + includeScript.split("\r\n|\r|\n").length;
            AFCmdBase.jse.addNumberOfLines(includeScript);
        } catch (Exception e) {
            SimpleLog.log(logtype.DEBUG, "Error including openaf.js", e);
        }*/
        AFBase.runFromClass(Class.forName("openaf_js").getDeclaredConstructor().newInstance());
        cx.setErrorReporter(new OpenRhinoErrorReporter());

        if (isolatePMs) {
            script = "(function(__pIn) { var __pmOut = {}; var __pmIn = __pIn; " + script
                    + "; return __pmOut; })(" + pmIn.toString() + ")";
        }

        Object res = null;
        if (injectscript || filescript || injectcode || processScript) {
            Context cxl = (Context) jse.enterContext();
            org.mozilla.javascript.Script compiledScript = cxl.compileString(includeScript + script, scriptfile,
                    1, null);
            res = compiledScript.exec(cxl, (Scriptable) jse.getGlobalscope());
            jse.exitContext();
        }

        if (injectclass) {
            res = AFBase.runFromClass(Class.forName(injectclassfile).getDeclaredConstructor().newInstance());
        }

        if (isolatePMs && res != null && !(res instanceof Undefined)) {
            jsonPMOut = (NativeObject) res;
        } else {
            // Obtain pmOut as output
            jsonPMOut = (NativeObject) ((ScriptableObject) jse.getGlobalscope()).get("__pmOut");
        }

        // Convert to ParameterMap
        Object stringify = NativeJSON.stringify(cx, (Scriptable) jse.getGlobalscope(), jsonPMOut, null, null);
        com.google.gson.Gson gson = new com.google.gson.Gson();
        pmOut = gson.fromJson(stringify.toString(), com.google.gson.JsonObject.class);

        // Leave Rhino
        //org.mozilla.javascript.Context.exit();
        //jse.exitContext();
    }

    return pmOut;
}
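In this example readInt acts as a quick file-type probe: a ZIP archive (and hence an OpenAF opack) begins with the local-file-header signature bytes PK 0x03 0x04, which read big-endian as the int 0x504B0304. A minimal standalone sketch of the same check (class and method names are illustrative):

import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class ZipMagicCheck {
    // Returns true if the file starts with the ZIP local-file-header signature.
    static boolean looksLikeZip(String path) throws IOException {
        try (DataInputStream dis = new DataInputStream(new BufferedInputStream(new FileInputStream(path)))) {
            return dis.readInt() == 0x504B0304; // bytes 'P' 'K' 0x03 0x04 read as one big-endian int
        }
    }
}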
From source file:com.p2p.misc.DeviceUtility.java
private Boolean RqsLocation(int cid, int lac) {
    Boolean result = false;
    String urlmmap = "http://www.google.com/glm/mmap";
    try {
        if (simPresent() == 1) {
            if (!inAirplaneMode()) {
                if (CheckNetConnectivity(mactivity)) {
                    URL url = new URL(urlmmap);
                    URLConnection conn = url.openConnection();
                    HttpURLConnection httpConn = (HttpURLConnection) conn;
                    httpConn.setRequestMethod("POST");
                    httpConn.setDoOutput(true);
                    httpConn.setDoInput(true);
                    httpConn.setReadTimeout(60000);
                    httpConn.connect();

                    OutputStream outputStream = httpConn.getOutputStream();
                    WriteData(outputStream, cid, lac);

                    InputStream inputStream = httpConn.getInputStream();
                    DataInputStream dataInputStream = new DataInputStream(inputStream);

                    dataInputStream.readShort();
                    dataInputStream.readByte();
                    int code = dataInputStream.readInt();
                    System.out.println("code--->>" + code);
                    if (code == 0) {
                        myLatitude = dataInputStream.readInt();
                        myLongitude = dataInputStream.readInt();
                        System.out.println("myLatitude--->>" + myLatitude);
                        System.out.println("myLongitude--->>" + myLongitude);
                        result = true;
                    } else {
                        OpenCellID opencellid = new OpenCellID();
                        try {
                            opencellid.GetOpenCellID();
                        } catch (Exception e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                    }
                }
            }
        }
    } catch (ProtocolException e) {
        // TODO: handle exception
        System.out.println("In Protocol Exception");
        latitude = "0";
        longitude = "0";
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        System.out.println("In IO Exception");
        latitude = "0";
        longitude = "0";
    }
    return result;
}
From source file:org.apache.hadoop.mapred.buffer.Manager.java
public void open() throws IOException {
    Configuration conf = tracker.conf();

    int maxMaps = conf.getInt("mapred.tasktracker.map.tasks.maximum", 2);
    int maxReduces = conf.getInt("mapred.tasktracker.reduce.tasks.maximum", 1);

    InetSocketAddress serverAddress = getServerAddress(conf);
    this.server = RPC.getServer(this, serverAddress.getHostName(), serverAddress.getPort(),
            maxMaps + maxReduces, false, conf);
    this.server.start();

    this.requestTransfer.setPriority(Thread.MAX_PRIORITY);
    this.requestTransfer.start();

    /** The server socket and selector registration */
    InetSocketAddress controlAddress = getControlAddress(conf);
    this.controlPort = controlAddress.getPort();
    this.channel = ServerSocketChannel.open();
    this.channel.socket().bind(controlAddress);

    this.acceptor = new Thread() {
        @Override
        public void run() {
            while (!isInterrupted()) {
                SocketChannel connection = null;
                try {
                    connection = channel.accept();
                    DataInputStream in = new DataInputStream(connection.socket().getInputStream());
                    int numRequests = in.readInt();
                    for (int i = 0; i < numRequests; i++) {
                        BufferRequest request = BufferRequest.read(in);
                        if (request instanceof ReduceBufferRequest) {
                            add((ReduceBufferRequest) request);
                            LOG.info("add new request " + request);
                        } else if (request instanceof MapBufferRequest) {
                            add((MapBufferRequest) request);
                        }
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    try {
                        if (connection != null)
                            connection.close();
                    } catch (IOException e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                }
            }
        }
    };
    this.acceptor.setDaemon(true);
    this.acceptor.setPriority(Thread.MAX_PRIORITY);
    this.acceptor.start();

    this.serviceQueue = new Thread() {
        public void run() {
            List<OutputFile> service = new ArrayList<OutputFile>();
            while (!isInterrupted()) {
                try {
                    OutputFile o = queue.take();
                    service.add(o);
                    queue.drainTo(service);
                    for (OutputFile file : service) {
                        try {
                            if (file != null)
                                add(file);
                        } catch (Throwable t) {
                            t.printStackTrace();
                            LOG.error("Error service file: " + file + ". " + t);
                        }
                    }
                } catch (Throwable t) {
                    t.printStackTrace();
                    LOG.error(t);
                } finally {
                    service.clear();
                }
            }
            LOG.info("Service queue thread exit.");
        }
    };
    this.serviceQueue.setPriority(Thread.MAX_PRIORITY);
    this.serviceQueue.setDaemon(true);
    this.serviceQueue.start();
}
From source file:utility.DeviceUtility.java
private Boolean RqsLocation(int cid, int lac) {
    Boolean result = false;
    String urlmmap = "http://www.google.com/glm/mmap";
    try {
        if (simPresent() == 1) {
            if (!inAirplaneMode()) {
                if (CheckNetConnectivity(mactivity)) {
                    URL url = new URL(urlmmap);
                    URLConnection conn = url.openConnection();
                    HttpURLConnection httpConn = (HttpURLConnection) conn;
                    httpConn.setRequestMethod("POST");
                    httpConn.setDoOutput(true);
                    httpConn.setDoInput(true);
                    httpConn.setReadTimeout(60000);
                    httpConn.connect();

                    OutputStream outputStream = httpConn.getOutputStream();
                    WriteData(outputStream, cid, lac);

                    InputStream inputStream = httpConn.getInputStream();
                    DataInputStream dataInputStream = new DataInputStream(inputStream);

                    dataInputStream.readShort();
                    dataInputStream.readByte();
                    int code = dataInputStream.readInt();
                    System.out.println("code--->>" + code);
                    if (code == 0) {
                        myLatitude = dataInputStream.readInt();
                        myLongitude = dataInputStream.readInt();
                        System.out.println("myLatitude--->>" + myLatitude);
                        System.out.println("myLongitude--->>" + myLongitude);
                        result = true;
                    }
                    /*else {
                        OpenCellID opencellid = new OpenCellID();
                        try {
                            opencellid.GetOpenCellID();
                        } catch (Exception e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                    }*/
                }
            }
        }
    } catch (ProtocolException e) {
        // TODO: handle exception
        System.out.println("In Protocol Exception");
        latitude = "0";
        longitude = "0";
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        System.out.println("In IO Exception");
        latitude = "0";
        longitude = "0";
    }
    return result;
}
From source file:org.apache.hadoop.hbase.util.RegionMover.java
private List<HRegionInfo> readRegionsFromFile(String filename) throws IOException {
    List<HRegionInfo> regions = new ArrayList<HRegionInfo>();
    File f = new File(filename);
    if (!f.exists()) {
        return regions;
    }
    FileInputStream fis = null;
    DataInputStream dis = null;
    try {
        fis = new FileInputStream(f);
        dis = new DataInputStream(fis);
        int numRegions = dis.readInt();
        int index = 0;
        while (index < numRegions) {
            regions.add(HRegionInfo.parseFromOrNull(Bytes.readByteArray(dis)));
            index++;
        }
    } catch (IOException e) {
        LOG.error("Error while reading regions from file:" + filename, e);
        throw e;
    } finally {
        if (dis != null) {
            dis.close();
        }
        if (fis != null) {
            fis.close();
        }
    }
    return regions;
}
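The pattern here is a count-prefixed list: readInt returns how many serialized entries follow. A minimal round-trip sketch of the same framing with plain strings (writer, reader, and file are illustrative, not the HBase utility's own format):

import java.io.*;
import java.util.ArrayList;
import java.util.List;

public class CountPrefixedList {
    static void write(File f, List<String> items) throws IOException {
        try (DataOutputStream out = new DataOutputStream(new FileOutputStream(f))) {
            out.writeInt(items.size()); // count prefix read back by readInt()
            for (String s : items) {
                out.writeUTF(s);
            }
        }
    }

    static List<String> read(File f) throws IOException {
        List<String> items = new ArrayList<>();
        try (DataInputStream in = new DataInputStream(new FileInputStream(f))) {
            int n = in.readInt(); // how many entries follow
            for (int i = 0; i < n; i++) {
                items.add(in.readUTF());
            }
        }
        return items;
    }
}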
From source file:ch.unil.genescore.vegas.Snp.java
public void readPosAndMinorAllele(DataInputStream is) throws IOException, DataInconsistencyException {
    // TODO: Only solves homegrown case atm;
    // @David I changed this to an IllegalArgumentException because the other one was unknown on my system
    // id_ is already read
    //String curChr = chr_;
    //int curStart = start_;
    //int curEnd = end_;
    chr_ = is.readUTF();
    start_ = is.readInt();
    end_ = is.readInt();
    //if (curChr != null || curStart != -1 || curEnd != -1) {
    //    if (!chr_.equals(curChr) || start_ != curStart || end_ != curEnd) {
    //        throw new RuntimeException("snp seems to have been set before to another value");
    //    }
    //}
    posStrand_ = is.readBoolean();
    minorAllele_ = is.readChar();
}