Usage examples for the java.io.ByteArrayInputStream.close() method, collected from several open-source projects.
public void close() throws IOException
From source file:com.espringtran.compressor4j.processor.TarGzProcessor.java
/** * Read from compressed file/*from ww w . j a v a 2 s .com*/ * * @param srcPath * path of compressed file * @param fileCompressor * FileCompressor object * @throws Exception */ @Override public void read(String srcPath, FileCompressor fileCompressor) throws Exception { long t1 = System.currentTimeMillis(); byte[] data = FileUtil.convertFileToByte(srcPath); ByteArrayInputStream bais = new ByteArrayInputStream(data); GzipCompressorInputStream cis = new GzipCompressorInputStream(bais); TarArchiveInputStream ais = new TarArchiveInputStream(cis); ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { byte[] buffer = new byte[1024]; int readByte; TarArchiveEntry entry = ais.getNextTarEntry(); while (entry != null && entry.getSize() > 0) { long t2 = System.currentTimeMillis(); baos = new ByteArrayOutputStream(); readByte = ais.read(buffer); while (readByte != -1) { baos.write(buffer, 0, readByte); readByte = ais.read(buffer); } BinaryFile binaryFile = new BinaryFile(entry.getName(), baos.toByteArray()); fileCompressor.addBinaryFile(binaryFile); LogUtil.createAddFileLog(fileCompressor, binaryFile, t2, System.currentTimeMillis()); entry = ais.getNextTarEntry(); } } catch (Exception e) { FileCompressor.LOGGER.error("Error on get compressor file", e); } finally { baos.close(); ais.close(); cis.close(); bais.close(); } LogUtil.createReadLog(fileCompressor, srcPath, data.length, t1, System.currentTimeMillis()); }
From source file:com.espringtran.compressor4j.processor.TarBz2Processor.java
/** * Read from compressed file/*from w w w .j a v a 2 s .c om*/ * * @param srcPath * path of compressed file * @param fileCompressor * FileCompressor object * @throws Exception */ @Override public void read(String srcPath, FileCompressor fileCompressor) throws Exception { long t1 = System.currentTimeMillis(); byte[] data = FileUtil.convertFileToByte(srcPath); ByteArrayInputStream bais = new ByteArrayInputStream(data); BZip2CompressorInputStream cis = new BZip2CompressorInputStream(bais); TarArchiveInputStream ais = new TarArchiveInputStream(cis); ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { byte[] buffer = new byte[1024]; int readByte; TarArchiveEntry entry = ais.getNextTarEntry(); while (entry != null && entry.getSize() > 0) { long t2 = System.currentTimeMillis(); baos = new ByteArrayOutputStream(); readByte = ais.read(buffer); while (readByte != -1) { baos.write(buffer, 0, readByte); readByte = ais.read(buffer); } BinaryFile binaryFile = new BinaryFile(entry.getName(), baos.toByteArray()); fileCompressor.addBinaryFile(binaryFile); LogUtil.createAddFileLog(fileCompressor, binaryFile, t2, System.currentTimeMillis()); entry = ais.getNextTarEntry(); } } catch (Exception e) { FileCompressor.LOGGER.error("Error on get compressor file", e); } finally { baos.close(); ais.close(); cis.close(); bais.close(); } LogUtil.createReadLog(fileCompressor, srcPath, data.length, t1, System.currentTimeMillis()); }
From source file:com.qubit.terra.docs.util.ReportGenerator.java
public byte[] generateReport() { contextMap.registerFieldsMetadata(); ByteArrayInputStream generatedReport = null; ByteArrayOutputStream outputStream = null; try {//from w w w. j a v a 2s . c om outputStream = new ByteArrayOutputStream(); final String freemarkerEngineKind = TemplateEngineKind.Freemarker.name(); IXDocReport report = XDocReport.loadReport(template, freemarkerEngineKind, fieldsMetadata, XDocReportRegistry.getRegistry()); for (ReportGeneratorPreProcessor preProcessor : preProcessors) { report.addPreprocessor(preProcessor.getEntryName(), preProcessor); } configureTemplateEngine(report.getTemplateEngine()); report.process(contextMap, outputStream); generatedReport = new ByteArrayInputStream(outputStream.toByteArray()); return convert(generatedReport); } catch (XDocReportException | IOException e) { e.printStackTrace(); throw new ReportGenerationException(e.getMessage(), e); } finally { if (generatedReport != null) { try { generatedReport.close(); } catch (IOException e) { throw new ReportGenerationException(e.getMessage(), e); } } if (outputStream != null) { try { outputStream.close(); } catch (IOException e) { throw new ReportGenerationException(e.getMessage(), e); } } } }
From source file:com.android.volley.cache.ACache.java
/**
 * Deserializes the cached value stored under the given key.
 *
 * @param key cache key
 * @return a CacheFeed holding the deserialized object and its out-of-date
 *         flag, or {@code null} when the entry is missing or unreadable
 */
public CacheFeed getAsObject(String key) {
    BinaryShell shell = getAsBinary(key);
    if (shell == null) {
        return null;
    }
    byte[] data = shell.content;
    if (data == null) {
        return null;
    }
    // try-with-resources replaces the hand-written close ladder; both streams
    // are closed in reverse order automatically.
    try (ByteArrayInputStream bais = new ByteArrayInputStream(data);
            ObjectInputStream ois = new ObjectInputStream(bais)) {
        Object reObject = ois.readObject();
        return new CacheFeed(reObject, shell.outOfDate);
    } catch (Exception e) {
        // best-effort cache read: corruption is treated as a cache miss
        e.printStackTrace();
        return null;
    }
}
From source file:com.hp.application.automation.tools.results.RunResultRecorder.java
/**
 * Archives the given report folder onto the master by zipping it to an
 * in-memory stream and copying that stream to {@code archivedFile}.
 *
 * @param reportFolder folder on the agent containing the report
 * @param testStatus   test result status used to decide whether to archive
 * @param archivedFile destination file on the master
 * @param listener     build listener used for progress logging
 * @return {@code true} if the folder was archived, {@code false} otherwise
 * @throws IOException          on zip/copy failure
 * @throws InterruptedException if the remote operation is interrupted
 */
private boolean archiveFolder(FilePath reportFolder, String testStatus, FilePath archivedFile,
        TaskListener listener) throws IOException, InterruptedException {
    String archiveTestResultMode = _resultsPublisherModel.getArchiveTestResultsMode();
    boolean archiveTestResult = isArchiveTestResult(testStatus, archiveTestResultMode);
    if (archiveTestResult) {
        if (reportFolder.exists()) {
            listener.getLogger().println("Zipping report folder: " + reportFolder);
            /*
             * copyRecursiveTo/copyFrom are avoided due to a Jenkins bug
             * (https://issues.jenkins-ci.org/browse/JENKINS-9189, claimed to
             * have been fixed, but not): zip the folder to a stream and copy
             * the stream to the master instead.
             */
            // BUG FIX: the streams were previously closed outside any finally
            // block, so they leaked when zip()/copyFrom() threw.
            try (ByteArrayOutputStream outstr = new ByteArrayOutputStream()) {
                reportFolder.zip(outstr);
                try (ByteArrayInputStream instr = new ByteArrayInputStream(outstr.toByteArray())) {
                    archivedFile.copyFrom(instr);
                }
            }
            return true;
        } else {
            listener.getLogger().println("No report folder was found in: " + reportFolder);
        }
    }
    return false;
}
From source file:org.apache.hadoop.hbase.crosssite.CrossSiteZNodes.java
/** * Gets a cluster locator./*from w w w .j ava2 s . c o m*/ * * @param tableName * @return * @throws IOException * @throws KeeperException */ public ClusterLocator getClusterLocator(String tableName) throws IOException, KeeperException { byte[] clusterLocatorData = getClusterLocatorData(tableName); ByteArrayInputStream stream = null; try { stream = new ByteArrayInputStream(clusterLocatorData); DataInput in = new DataInputStream(stream); ClusterLocatorRPCObject locator = new ClusterLocatorRPCObject(); locator.readFields(in); return locator.getClusterLocator(); } finally { if (stream != null) { try { stream.close(); } catch (IOException e) { LOG.warn("Fail to close the stream of reading cluster locator", e); } } } }
From source file:org.apache.nutch.tools.FileDumper.java
/**
 * Dumps the reverse engineered raw content from the provided segment
 * directories if a parent directory contains more than one segment, otherwise
 * a single segment can be passed as an argument.
 *
 * @param outputDir
 *          the directory you wish to dump the raw content to. This directory
 *          will be created.
 * @param segmentRootDir
 *          a directory containing one or more segments.
 * @param mimeTypes
 *          an array of mime types we have to dump, all others will be
 *          filtered out.
 * @param flatDir
 *          a boolean flag specifying whether the output directory should
 *          contain only files instead of using nested directories to prevent
 *          naming conflicts.
 * @param mimeTypeStats
 *          a flag indicating whether mimetype stats should be displayed
 *          instead of dumping files.
 * @param reverseURLDump
 *          when true, output is laid out under reversed-host directories with
 *          a SHA-256-based file name instead of the MD5-based scheme.
 * @throws Exception
 */
public void dump(File outputDir, File segmentRootDir, String[] mimeTypes, boolean flatDir,
        boolean mimeTypeStats, boolean reverseURLDump) throws Exception {
    if (mimeTypes == null)
        LOG.info("Accepting all mimetypes.");
    // total file counts
    Map<String, Integer> typeCounts = new HashMap<>();
    // filtered file counts
    Map<String, Integer> filteredCounts = new HashMap<>();
    Configuration conf = NutchConfiguration.create();
    int fileCount = 0;
    // only readable sub-directories qualify as segments
    File[] segmentDirs = segmentRootDir.listFiles(file -> file.canRead() && file.isDirectory());
    if (segmentDirs == null) {
        LOG.error("No segment directories found in [" + segmentRootDir.getAbsolutePath() + "]");
        return;
    }
    for (File segment : segmentDirs) {
        LOG.info("Processing segment: [" + segment.getAbsolutePath() + "]");
        DataOutputStream doutputStream = null;
        // maps each written file path back to its source URL; persisted as
        // JSON per segment at the bottom of this loop
        Map<String, String> filenameToUrl = new HashMap<String, String>();
        File segmentDir = new File(segment.getAbsolutePath(), Content.DIR_NAME);
        File[] partDirs = segmentDir.listFiles(file -> file.canRead() && file.isDirectory());
        if (partDirs == null) {
            LOG.warn("Skipping Corrupt Segment: [{}]", segment.getAbsolutePath());
            continue;
        }
        for (File partDir : partDirs) {
            try (FileSystem fs = FileSystem.get(conf)) {
                String segmentPath = partDir + "/data";
                Path file = new Path(segmentPath);
                if (!new File(file.toString()).exists()) {
                    LOG.warn("Skipping segment: [" + segmentPath + "]: no data directory present");
                    continue;
                }
                SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(file));
                Writable key = (Writable) reader.getKeyClass().newInstance();
                Content content = null;
                while (reader.next(key)) {
                    content = new Content();
                    reader.getCurrentValue(content);
                    String url = key.toString();
                    String baseName = FilenameUtils.getBaseName(url);
                    String extension = FilenameUtils.getExtension(url);
                    if (extension == null || (extension != null && extension.equals(""))) {
                        // default extension when the URL carries none
                        extension = "html";
                    }
                    ByteArrayInputStream bas = null;
                    Boolean filter = false;
                    try {
                        bas = new ByteArrayInputStream(content.getContent());
                        // sniff the real mime type from the bytes, not the URL
                        String mimeType = new Tika().detect(content.getContent());
                        collectStats(typeCounts, mimeType);
                        if (mimeType != null) {
                            if (mimeTypes == null || Arrays.asList(mimeTypes).contains(mimeType)) {
                                collectStats(filteredCounts, mimeType);
                                filter = true;
                            }
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                        LOG.warn("Tika is unable to detect type for: [" + url + "]");
                    } finally {
                        if (bas != null) {
                            try {
                                bas.close();
                            } catch (Exception ignore) {
                            }
                        }
                    }
                    if (filter) {
                        if (!mimeTypeStats) {
                            String md5Ofurl = DumpFileUtil.getUrlMD5(url);
                            String fullDir = outputDir.getAbsolutePath();
                            if (!flatDir && !reverseURLDump) {
                                fullDir = DumpFileUtil.createTwoLevelsDirectory(fullDir, md5Ofurl);
                            }
                            if (!Strings.isNullOrEmpty(fullDir)) {
                                String outputFullPath;
                                if (reverseURLDump) {
                                    String[] reversedURL = TableUtil.reverseUrl(url).split(":");
                                    reversedURL[0] = reversedURL[0].replace('.', '/');
                                    String reversedURLPath = reversedURL[0] + "/"
                                            + DigestUtils.sha256Hex(url).toUpperCase();
                                    outputFullPath = String.format("%s/%s", fullDir, reversedURLPath);
                                    // We'll drop the trailing file name and create the nested
                                    // structure if it doesn't already exist.
                                    String[] splitPath = outputFullPath.split("/");
                                    File fullOutputDir = new File(org.apache.commons.lang3.StringUtils
                                            .join(Arrays.copyOf(splitPath, splitPath.length - 1), "/"));
                                    if (!fullOutputDir.exists()) {
                                        fullOutputDir.mkdirs();
                                    }
                                } else {
                                    outputFullPath = String.format("%s/%s", fullDir,
                                            DumpFileUtil.createFileName(md5Ofurl, baseName, extension));
                                }
                                filenameToUrl.put(outputFullPath, url);
                                File outputFile = new File(outputFullPath);
                                if (!outputFile.exists()) {
                                    LOG.info("Writing: [" + outputFullPath + "]");
                                    // Modified to prevent FileNotFoundException (Invalid Argument)
                                    FileOutputStream output = null;
                                    try {
                                        output = new FileOutputStream(outputFile);
                                        IOUtils.write(content.getContent(), output);
                                    } catch (Exception e) {
                                        LOG.warn("Write Error: [" + outputFullPath + "]");
                                        e.printStackTrace();
                                    } finally {
                                        if (output != null) {
                                            output.flush();
                                            try {
                                                output.close();
                                            } catch (Exception ignore) {
                                            }
                                        }
                                    }
                                    fileCount++;
                                } else {
                                    LOG.info("Skipping writing: [" + outputFullPath + "]: file already exists");
                                }
                            }
                        }
                    }
                }
                reader.close();
            } finally {
                // NOTE(review): doutputStream is never assigned in this method,
                // so this cleanup appears to be dead code — confirm before removing.
                if (doutputStream != null) {
                    try {
                        doutputStream.close();
                    } catch (Exception ignore) {
                    }
                }
            }
        }
        // save filenameToUrl in a json file; for each segment there is one mapping file
        String filenameToUrlFilePath = String.format("%s/%s_filenameToUrl.json",
                outputDir.getAbsolutePath(), segment.getName());
        new ObjectMapper().writeValue(new File(filenameToUrlFilePath), filenameToUrl);
    }
    LOG.info("Dumper File Stats: " + DumpFileUtil.displayFileTypes(typeCounts, filteredCounts));
    if (mimeTypeStats) {
        System.out.println("Dumper File Stats: " + DumpFileUtil.displayFileTypes(typeCounts, filteredCounts));
    }
}
From source file:IntSort.java
public void readStream() { try {//from w ww. ja v a 2 s .com // Careful: Make sure this is big enough! // Better yet, test and reallocate if necessary byte[] recData = new byte[50]; // Read from the specified byte array ByteArrayInputStream strmBytes = new ByteArrayInputStream(recData); // Read Java data types from the above byte array DataInputStream strmDataType = new DataInputStream(strmBytes); if (rs.getNumRecords() > 0) { ComparatorInt comp = new ComparatorInt(); int i = 1; RecordEnumeration re = rs.enumerateRecords(null, comp, false); while (re.hasNextElement()) { // Get data into the byte array rs.getRecord(re.nextRecordId(), recData, 0); // Read back the data types System.out.println("Record #" + i++); System.out.println("Name: " + strmDataType.readUTF()); System.out.println("Dog: " + strmDataType.readBoolean()); System.out.println("Rank: " + strmDataType.readInt()); System.out.println("--------------------"); // Reset so read starts at beginning of array strmBytes.reset(); } comp.compareIntClose(); // Free enumerator re.destroy(); } strmBytes.close(); strmDataType.close(); } catch (Exception e) { db(e.toString()); } }
From source file:com.inter.trade.view.slideplayview.util.AbFileUtil.java
/** * ??byte[]./*w w w .j a v a 2s . com*/ * @param imgByte byte[] * @param fileName ?????.jpg * @param type ???AbConstant * @param newWidth * @param newHeight * @return Bitmap */ public static Bitmap getBitmapFormByte(byte[] imgByte, String fileName, int type, int newWidth, int newHeight) { FileOutputStream fos = null; DataInputStream dis = null; ByteArrayInputStream bis = null; Bitmap b = null; File file = null; try { if (imgByte != null) { File sdcardDir = Environment.getExternalStorageDirectory(); String path = sdcardDir.getAbsolutePath() + downPathImageDir; file = new File(path + fileName); if (!file.getParentFile().exists()) { file.getParentFile().mkdirs(); } if (!file.exists()) { file.createNewFile(); } fos = new FileOutputStream(file); int readLength = 0; bis = new ByteArrayInputStream(imgByte); dis = new DataInputStream(bis); byte[] buffer = new byte[1024]; while ((readLength = dis.read(buffer)) != -1) { fos.write(buffer, 0, readLength); try { Thread.sleep(500); } catch (Exception e) { } } fos.flush(); b = getBitmapFromSD(file, type, newWidth, newHeight); } } catch (Exception e) { e.printStackTrace(); } finally { if (dis != null) { try { dis.close(); } catch (Exception e) { } } if (bis != null) { try { bis.close(); } catch (Exception e) { } } if (fos != null) { try { fos.close(); } catch (Exception e) { } } } return b; }
From source file:Base64.java
/** * Decodes data from Base64 notation, automatically * detecting gzip-compressed data and decompressing it. * * @param s the string to decode/*from w w w . ja v a 2s . c o m*/ * @return the decoded data * @since 1.4 */ public static byte[] decode(String s) { byte[] bytes; try { bytes = s.getBytes(PREFERRED_ENCODING); } // end try catch (java.io.UnsupportedEncodingException uee) { bytes = s.getBytes(); } // end catch //</change> // Decode bytes = decode(bytes, 0, bytes.length); // Check to see if it's gzip-compressed // GZIP Magic Two-Byte Number: 0x8b1f (35615) if (bytes != null && bytes.length >= 4) { int head = ((int) bytes[0] & 0xff) | ((bytes[1] << 8) & 0xff00); if (java.util.zip.GZIPInputStream.GZIP_MAGIC == head) { java.io.ByteArrayInputStream bais = null; java.util.zip.GZIPInputStream gzis = null; java.io.ByteArrayOutputStream baos = null; byte[] buffer = new byte[2048]; int length = 0; try { baos = new java.io.ByteArrayOutputStream(); bais = new java.io.ByteArrayInputStream(bytes); gzis = new java.util.zip.GZIPInputStream(bais); while ((length = gzis.read(buffer)) >= 0) { baos.write(buffer, 0, length); } // end while: reading input // No error? Get new bytes. bytes = baos.toByteArray(); } // end try catch (java.io.IOException e) { // Just return originally-decoded bytes } // end catch finally { try { baos.close(); } catch (Exception e) { } try { gzis.close(); } catch (Exception e) { } try { bais.close(); } catch (Exception e) { } } // end finally } // end if: gzipped } // end if: bytes.length >= 2 return bytes; }