List of usage examples for the java.io.BufferedOutputStream constructor
public BufferedOutputStream(OutputStream out)
From source file:com.twitter.heron.apiserver.utils.FileHelper.java
/**
 * Packages the given files (recursively, via addFileToArchive) into a
 * gzip-compressed tar archive.
 *
 * @param archive destination .tar.gz file, overwritten if present
 * @param files   top-level files/directories to add
 * @return true on success; false if an I/O error occurred (logged, not thrown)
 */
public static boolean createTarGz(File archive, File... files) {
    // try-with-resources closes the whole stream chain in reverse order
    try (FileOutputStream fileOut = new FileOutputStream(archive);
         BufferedOutputStream bufferedOut = new BufferedOutputStream(fileOut);
         GzipCompressorOutputStream gzipOut = new GzipCompressorOutputStream(bufferedOut);
         TarArchiveOutputStream tarOut = new TarArchiveOutputStream(gzipOut)) {
        for (File file : files) {
            addFileToArchive(tarOut, file, "");
        }
        // finish() writes the tar trailer records before close()
        tarOut.finish();
    } catch (IOException ioe) {
        LOG.error("Failed to create archive {} file.", archive, ioe);
        return false;
    }
    return true;
}
From source file:com.ibm.util.merge.storage.TarArchive.java
/**
 * Opens the archive's output stream chain (file -> buffer -> tar) and
 * installs it via setOutputStream.
 *
 * @throws IOException if the file at getFilePath() cannot be opened
 */
@Override
public void openOutputStream() throws IOException {
    BufferedOutputStream buffered =
            new BufferedOutputStream(new FileOutputStream(getFilePath()));
    setOutputStream(new TarArchiveOutputStream(buffered));
}
From source file:br.univali.celine.lms.utils.zip.Zip.java
/**
 * Zips the listed files (entry names only, directories stripped) into
 * destFile at the requested compression level.
 *
 * Fix: the original never closed any stream when an exception was thrown
 * mid-loop, leaking file handles; try-with-resources now guarantees every
 * stream is closed on all paths.
 *
 * @param fileList         paths of the files to add
 * @param destFile         destination zip file; must not already exist
 * @param compressionLevel deflate level (0-9) passed to ZipOutputStream
 * @throws Exception if destFile already exists or an I/O error occurs
 */
public void zip(ArrayList<String> fileList, File destFile, int compressionLevel) throws Exception {
    if (destFile.exists())
        throw new Exception("File " + destFile.getName() + " already exists!!");
    byte[] buffer = new byte[4096];
    try (ZipOutputStream zipFile = new ZipOutputStream(
            new BufferedOutputStream(new FileOutputStream(destFile)))) {
        zipFile.setLevel(compressionLevel);
        for (String path : fileList) {
            try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(path))) {
                // entry name is the bare file name, as in the original
                zipFile.putNextEntry(new ZipEntry(FilenameUtils.getName(path)));
                int fileLength;
                while ((fileLength = bis.read(buffer, 0, 4096)) > 0)
                    zipFile.write(buffer, 0, fileLength);
                zipFile.closeEntry();
            }
        }
    }
}
From source file:com.anthony.forumspring.controller.FileUploadController.java
@RequestMapping(value = "/uploadFile", method = RequestMethod.POST) public @ResponseBody String uploadFildHander(@RequestParam(value = "name") String name, @RequestParam("file") MultipartFile file) { if (!file.isEmpty()) { try {// www . jav a 2 s . com byte[] bytes = file.getBytes(); // Creating the directory to store file String rootPath = System.getProperty("catalina.home"); File dir = new File(rootPath + File.separator + "tmpFiles"); if (!dir.exists()) { dir.mkdirs(); } // Create the file on server File serverFile = new File(dir.getAbsolutePath() + File.separator + name); BufferedOutputStream stream = new BufferedOutputStream(new FileOutputStream(serverFile)); stream.write(bytes); stream.close(); //logger.info("Server File Location=" // + serverFile.getAbsolutePath()); return "You successfully uploaded file=" + name; } catch (Exception e) { return "You failed to upload " + name + " => " + e.getMessage(); } } else { return "You failed to upload " + name + " because the file was empty."; } }
From source file:org.canova.api.records.reader.impl.SVMRecordWriterTest.java
/**
 * Round-trips the iris dataset: classpath CSV -> records -> SVMLight file
 * -> records, asserting 150 records of 5 writables survive both hops.
 */
@Test
public void testWriter() throws Exception {
    InputStream is = new ClassPathResource("iris.dat").getInputStream();
    assumeNotNull(is);
    // copy the classpath resource to a plain file so FileSplit can read it
    File tmp = new File("iris.txt");
    BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(tmp));
    IOUtils.copy(is, bos);
    bos.flush();
    bos.close();
    InputSplit split = new FileSplit(tmp);
    tmp.deleteOnExit();
    RecordReader reader = new CSVRecordReader();
    List<Collection<Writable>> records = new ArrayList<>();
    reader.initialize(split);
    while (reader.hasNext()) {
        Collection<Writable> record = reader.next();
        // iris rows: 4 feature columns + 1 label column
        assertEquals(5, record.size());
        records.add(record);
    }
    assertEquals(150, records.size());
    // write everything out in SVMLight format...
    File out = new File("iris_out.txt");
    out.deleteOnExit();
    RecordWriter writer = new SVMLightRecordWriter(out, true);
    for (Collection<Writable> record : records)
        writer.write(record);
    writer.close();
    records.clear();
    // ...then read it back and verify the same record count and width
    RecordReader svmReader = new SVMLightRecordReader();
    InputSplit svmSplit = new FileSplit(out);
    svmReader.initialize(svmSplit);
    assertTrue(svmReader.hasNext());
    while (svmReader.hasNext()) {
        Collection<Writable> record = svmReader.next();
        assertEquals(5, record.size());
        records.add(record);
    }
    assertEquals(150, records.size());
}
From source file:org.slc.sli.ingestion.IngestionTest.java
/**
 * Opens the file at filePath for writing and returns a buffered stream
 * over it (existing content is truncated).
 *
 * @param filePath path of the file to create or overwrite
 * @return a buffered output stream the caller must close
 * @throws IOException if the file cannot be opened for writing
 */
public static OutputStream createFileOutputStream(String filePath) throws IOException {
    return new BufferedOutputStream(new FileOutputStream(new File(filePath)));
}
From source file:com.jaxio.celerio.output.ZipOutputResult.java
/**
 * Opens the underlying jar/zip output stream chain; calling open() on an
 * already-open instance is a no-op.
 *
 * @throws IOException if the destination file cannot be opened
 */
@Override
public void open() throws IOException {
    if (isOpen) {
        return;
    }
    FileOutputStream fileStream = new FileOutputStream(new File(filename));
    bufferedOutputStream = new BufferedOutputStream(fileStream);
    zipOutputStream = new JarOutputStream(bufferedOutputStream);
    isOpen = true;
}
From source file:com.googlecode.lineblog.websocket.v2.TokenThread.java
/**
 * Wraps an accepted client socket in buffered streams and performs the
 * initial handshake.
 *
 * Fix: the original threw a bare RuntimeException with the inverted message
 * "socket is not null!". A NullPointerException (itself a RuntimeException,
 * so existing callers keep working) with a corrected message is thrown now.
 *
 * @param socket the connected client socket; must not be null
 * @throws IOException if the socket's streams cannot be opened
 */
public TokenThread(Socket socket) throws IOException {
    if (socket == null)
        throw new NullPointerException("socket must not be null");
    this.socket = socket;
    this.in = new BufferedInputStream(this.socket.getInputStream());
    this.out = new BufferedOutputStream(this.socket.getOutputStream());
    // NOTE(review): accept() appears to run the opening handshake and return
    // a status code — confirm its exact semantics against the class docs.
    this.status = this.accept();
}
From source file:com.cloudera.nav.sdk.client.writer.MetadataWriterFactory.java
/**
 * Creates a new metadata writer backed by a fresh HTTP connection.
 *
 * Fix: Guava's {@code Throwables.propagate} is deprecated; an
 * {@link java.io.UncheckedIOException} (a RuntimeException, matching what
 * propagate produced for an IOException) preserves the cause instead.
 *
 * @return a JSON metadata writer over the connection's output stream
 */
public MetadataWriter newWriter() {
    try {
        HttpURLConnection conn = createHttpStream();
        OutputStream stream = new BufferedOutputStream(conn.getOutputStream());
        return new JsonMetadataWriter(config, stream, conn);
    } catch (IOException e) {
        throw new java.io.UncheckedIOException(e);
    }
}
From source file:logicProteinHypernetwork.analysis.complexes.offline.ComplexPredictionCommand.java
/** * Returns predicted complexes for a given undirected graph of proteins. * * @param g the graph/*from w ww .j av a 2 s .c o m*/ * @return the predicted complexes */ public Collection<Complex> transform(UndirectedGraph<Protein, Interaction> g) { try { Process p = Runtime.getRuntime().exec(command); BufferedOutputStream outputStream = new BufferedOutputStream(p.getOutputStream()); BufferedInputStream inputStream = new BufferedInputStream(p.getInputStream()); for (Interaction i : g.getEdges()) { String out = i.first() + " pp " + i.second(); outputStream.write(out.getBytes()); } outputStream.close(); p.waitFor(); if (!hypernetwork.getProteins().hasIndex()) { hypernetwork.getProteins().buildIndex(); } Collection<Complex> complexes = new ArrayList<Complex>(); Scanner s = new Scanner(inputStream); Pattern rowPattern = Pattern.compile(".*?\\n"); Pattern proteinPattern = Pattern.compile(".*?\\s"); while (s.hasNext(rowPattern)) { Complex c = new Complex(); while (s.hasNext(proteinPattern)) { c.add(hypernetwork.getProteins().getProteinById(s.next(proteinPattern).trim())); } complexes.add(c); } inputStream.close(); return complexes; } catch (InterruptedException ex) { Logger.getLogger(ComplexPredictionCommand.class.getName()).log(Level.SEVERE, null, ex); } catch (IOException ex) { Logger.getLogger(ComplexPredictionCommand.class.getName()).log(Level.SEVERE, null, ex); } return null; }