List of usage examples for java.io.DataInputStream.close()
public void close() throws IOException
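Before the project-specific examples below, here is a minimal sketch of the two usual ways to make sure close() runs: an explicit call in a finally block (the pattern most of the examples on this page use) and try-with-resources, which works because DataInputStream implements Closeable. The file name data.bin is just a placeholder for illustration.

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class CloseExample {
    public static void main(String[] args) throws IOException {
        // Explicit close() in a finally block.
        DataInputStream in = null;
        try {
            in = new DataInputStream(new FileInputStream("data.bin"));
            System.out.println("first int: " + in.readInt());
        } finally {
            if (in != null) {
                in.close();
            }
        }

        // Equivalent try-with-resources form: close() is called automatically.
        try (DataInputStream in2 = new DataInputStream(new FileInputStream("data.bin"))) {
            System.out.println("first int: " + in2.readInt());
        }
    }
}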
From source file:org.apache.hadoop.fs.TestDFSIO.java
private static void analyzeResult(FileSystem fs, int testType, long execTime, String resFileName)
        throws IOException {
    Path reduceFile;
    if (testType == TEST_TYPE_WRITE)
        reduceFile = new Path(WRITE_DIR, "part-00000");
    else
        reduceFile = new Path(READ_DIR, "part-00000");
    long tasks = 0;
    long size = 0;
    long time = 0;
    float rate = 0;
    float sqrate = 0;
    DataInputStream in = null;
    BufferedReader lines = null;
    try {
        in = new DataInputStream(fs.open(reduceFile));
        lines = new BufferedReader(new InputStreamReader(in));
        String line;
        while ((line = lines.readLine()) != null) {
            StringTokenizer tokens = new StringTokenizer(line, " \t\n\r\f%");
            String attr = tokens.nextToken();
            if (attr.endsWith(":tasks"))
                tasks = Long.parseLong(tokens.nextToken());
            else if (attr.endsWith(":size"))
                size = Long.parseLong(tokens.nextToken());
            else if (attr.endsWith(":time"))
                time = Long.parseLong(tokens.nextToken());
            else if (attr.endsWith(":rate"))
                rate = Float.parseFloat(tokens.nextToken());
            else if (attr.endsWith(":sqrate"))
                sqrate = Float.parseFloat(tokens.nextToken());
        }
    } finally {
        if (in != null)
            in.close();
        if (lines != null)
            lines.close();
    }

    double med = rate / 1000 / tasks;
    double stdDev = Math.sqrt(Math.abs(sqrate / 1000 / tasks - med * med));
    String resultLines[] = {
            "----- TestDFSIO ----- : "
                    + ((testType == TEST_TYPE_WRITE) ? "write" : (testType == TEST_TYPE_READ) ? "read" : "unknown"),
            " Date & time: " + new Date(System.currentTimeMillis()),
            " Number of files: " + tasks,
            "Total MBytes processed: " + size / MEGA,
            " Throughput mb/sec: " + size * 1000.0 / (time * MEGA),
            "Average IO rate mb/sec: " + med,
            " IO rate std deviation: " + stdDev,
            " Test exec time sec: " + (float) execTime / 1000,
            "" };

    PrintStream res = null;
    try {
        res = new PrintStream(new FileOutputStream(new File(resFileName), true));
        for (int i = 0; i < resultLines.length; i++) {
            LOG.info(resultLines[i]);
            res.println(resultLines[i]);
        }
    } finally {
        if (res != null)
            res.close();
    }
}
From source file:com.cloudera.recordbreaker.analyzer.UnknownTextSchemaDescriptor.java
void computeSchema() throws IOException {
    this.randId = new Random().nextInt();
    LearnStructure ls = new LearnStructure();
    FileSystem fs = FSAnalyzer.getInstance().getFS();
    FileSystem localFS = FileSystem.getLocal(new Configuration());
    Path inputPath = dd.getFilename();
    File workingParserFile = File.createTempFile("textdesc", "typetree", null);
    File workingSchemaFile = File.createTempFile("textdesc", "schema", null);

    ls.inferRecordFormat(fs, inputPath, localFS, new Path(workingSchemaFile.getCanonicalPath()),
            new Path(workingParserFile.getCanonicalPath()), null, null, false, MAX_LINES);

    this.schema = Schema.parse(workingSchemaFile);
    DataInputStream in = new DataInputStream(localFS.open(new Path(workingParserFile.getCanonicalPath())));
    try {
        this.typeTree = InferredType.readType(in);
    } catch (IOException iex) {
        iex.printStackTrace();
        throw iex;
    } finally {
        in.close();
    }
    //System.err.println("Recovered unknowntext schema: " + schema);
}
From source file:com.android.hierarchyviewerlib.device.DeviceBridge.java
public static PsdFile captureLayers(Window window) {
    DeviceConnection connection = null;
    DataInputStream in = null;

    try {
        connection = new DeviceConnection(window.getDevice());
        connection.sendCommand("CAPTURE_LAYERS " + window.encode()); //$NON-NLS-1$

        in = new DataInputStream(new BufferedInputStream(connection.getSocket().getInputStream()));

        int width = in.readInt();
        int height = in.readInt();

        PsdFile psd = new PsdFile(width, height);

        while (readLayer(in, psd)) {
        }

        return psd;
    } catch (Exception e) {
        Log.e(TAG, "Unable to capture layers for window " + window.getTitle() + " on device "
                + window.getDevice());
    } finally {
        if (in != null) {
            try {
                in.close();
            } catch (Exception ex) {
            }
        }
        connection.close();
    }

    return null;
}
From source file:cloudMe.CloudMeAPI.java
private String getResponse(HttpResponse response) {
    try {
        DataInputStream stream = new DataInputStream(response.getEntity().getContent());
        StringBuffer buf = new StringBuffer();
        String tmp;
        while ((tmp = stream.readLine()) != null) {
            buf.append(tmp);
        }
        stream.close();
        return buf.toString();
    } catch (IOException e) {
        return "IOException: " + e.getMessage();
    }
}
From source file:org.apache.hadoop.io.crypto.tool.CryptoApiTool.java
public void apiEncryption() {
    try {
        Key key = initKey(strHexKey);
        CryptoContext cryptoContext = new CryptoContext();
        cryptoContext.setKey(key);

        CryptoCodec cryptoCodec = (CryptoCodec) new AESCodec();
        cryptoCodec.setCryptoContext(cryptoContext);

        Path inputFile = new Path(strInput);
        Path outputFile = new Path(strOutput);

        Configuration conf = new Configuration();
        DataInputStream input = inputFile.getFileSystem(conf).open(inputFile);
        DataOutputStream outputStream = outputFile.getFileSystem(conf).create(outputFile);
        CompressionOutputStream output = cryptoCodec.createOutputStream(outputStream);

        writeStream(input, output);

        input.close();
        output.close();
    } catch (Exception e) {
        LOG.error("Encryption Fail:", e);
        throw new RuntimeException("Encryption Fail.");
    }
}
From source file:com.pdfhow.diff.UploadServlet.java
/**
 * Handles the HTTP <code>GET</code> method.
 *
 * @param request
 *            servlet request
 * @param response
 *            servlet response
 * @throws ServletException
 *             if a servlet-specific error occurs
 * @throws IOException
 *             if an I/O error occurs
 */
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    if (request.getParameter("getfile") != null && !request.getParameter("getfile").isEmpty()) {
        File file = new File(
                request.getServletContext().getRealPath("/") + "imgs/" + request.getParameter("getfile"));
        if (file.exists()) {
            int bytes = 0;
            ServletOutputStream op = response.getOutputStream();

            response.setContentType(getMimeType(file));
            response.setContentLength((int) file.length());
            response.setHeader("Content-Disposition", "inline; filename=\"" + file.getName() + "\"");

            byte[] bbuf = new byte[1024];
            DataInputStream in = new DataInputStream(new FileInputStream(file));

            while ((in != null) && ((bytes = in.read(bbuf)) != -1)) {
                op.write(bbuf, 0, bytes);
            }

            in.close();
            op.flush();
            op.close();
        }
    } else if (request.getParameter("delfile") != null && !request.getParameter("delfile").isEmpty()) {
        File file = new File(
                request.getServletContext().getRealPath("/") + "imgs/" + request.getParameter("delfile"));
        if (file.exists()) {
            file.delete(); // TODO:check and report success
        }
    } else if (request.getParameter("getthumb") != null && !request.getParameter("getthumb").isEmpty()) {
        File file = new File(
                request.getServletContext().getRealPath("/") + "imgs/" + request.getParameter("getthumb"));
        if (file.exists()) {
            System.out.println(file.getAbsolutePath());
            String mimetype = getMimeType(file);
            if (mimetype.endsWith("png") || mimetype.endsWith("jpeg") || mimetype.endsWith("jpg")
                    || mimetype.endsWith("gif")) {
                BufferedImage im = ImageIO.read(file);
                if (im != null) {
                    BufferedImage thumb = Scalr.resize(im, 75);
                    ByteArrayOutputStream os = new ByteArrayOutputStream();
                    if (mimetype.endsWith("png")) {
                        ImageIO.write(thumb, "PNG", os);
                        response.setContentType("image/png");
                    } else if (mimetype.endsWith("jpeg")) {
                        ImageIO.write(thumb, "jpg", os);
                        response.setContentType("image/jpeg");
                    } else if (mimetype.endsWith("jpg")) {
                        ImageIO.write(thumb, "jpg", os);
                        response.setContentType("image/jpeg");
                    } else {
                        ImageIO.write(thumb, "GIF", os);
                        response.setContentType("image/gif");
                    }
                    ServletOutputStream srvos = response.getOutputStream();
                    response.setContentLength(os.size());
                    response.setHeader("Content-Disposition", "inline; filename=\"" + file.getName() + "\"");
                    os.writeTo(srvos);
                    srvos.flush();
                    srvos.close();
                }
            }
        } // TODO: check and report success
    } else {
        PrintWriter writer = response.getWriter();
        writer.write("call POST with multipart form data");
    }
}
From source file:com.quigley.zabbixj.agent.active.ActiveThread.java
private JSONObject getResponse(byte[] responseBytes) throws Exception {
    byte[] sizeBuffer = new byte[8];
    int index = 0;
    for (int i = 12; i > 4; i--) {
        sizeBuffer[index++] = responseBytes[i];
    }

    ByteArrayInputStream bais = new ByteArrayInputStream(sizeBuffer);
    DataInputStream dis = new DataInputStream(bais);
    long size = dis.readLong();
    dis.close();
    bais.close();

    byte[] jsonBuffer = new byte[responseBytes.length - 13];
    if (jsonBuffer.length != size) {
        throw new ZabbixException("Reported and actual buffer sizes differ!");
    }
    index = 0;
    for (int i = 13; i < responseBytes.length; i++) {
        jsonBuffer[index++] = responseBytes[i];
    }

    JSONObject response = new JSONObject(new String(jsonBuffer));

    return response;
}
From source file:org.apache.hadoop.hdfs.server.namenode.metrics.TestNameNodeMetrics.java
private void readFile(FileSystem fileSys, Path name) throws IOException {
    // Just read file so that getNumBlockLocations are incremented
    DataInputStream stm = fileSys.open(name);
    byte[] buffer = new byte[4];
    stm.read(buffer, 0, 4);
    stm.close();
}
From source file:com.buaa.cfs.security.token.Token.java
/**
 * Get the token identifier object, or null if it could not be constructed (because the class could not
 * be loaded, for example).
 *
 * @return the token identifier, or null
 *
 * @throws IOException
 */
@SuppressWarnings("unchecked")
public T decodeIdentifier() throws IOException {
    Class<? extends TokenIdentifier> cls = getClassForIdentifier(getKind());
    if (cls == null) {
        return null;
    }
    TokenIdentifier tokenIdentifier = ReflectionUtils.newInstance(cls, null);
    ByteArrayInputStream buf = new ByteArrayInputStream(identifier);
    DataInputStream in = new DataInputStream(buf);
    tokenIdentifier.readFields(in);
    in.close();
    return (T) tokenIdentifier;
}
From source file:org.apache.hama.bsp.message.compress.SnappyCompressor.java
/**
 * Decompresses a BSPCompressedBundle and returns the corresponding BSPMessageBundle.
 *
 * @param compMsgBundle
 * @return
 */
@Override
public byte[] decompress(byte[] compressedBytes) {
    ByteArrayInputStream bis = null;
    SnappyInputStream sis = null;
    DataInputStream dis = null;
    byte[] bytes = null;

    try {
        bis = new ByteArrayInputStream(compressedBytes);
        sis = new SnappyInputStream(bis);
        dis = new DataInputStream(sis);
        bytes = IOUtils.toByteArray(dis);
    } catch (IOException ioe) {
        LOG.error("Unable to decompress.", ioe);
    } finally {
        try {
            dis.close();
            sis.close();
            bis.close();
        } catch (IOException e) {
            LOG.warn("Failed to close decompression streams.", e);
        }
    }

    return bytes;
}