Example usage for java.io BufferedOutputStream BufferedOutputStream

List of usage examples for java.io BufferedOutputStream BufferedOutputStream

Introduction

On this page you can find example usages of the java.io.BufferedOutputStream(OutputStream) constructor, taken from real-world source files.

Prototype

public BufferedOutputStream(OutputStream out) 

Document

Creates a new buffered output stream to write data to the specified underlying output stream.
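
Before the longer, real-world examples below, here is a minimal sketch of the one-argument constructor in use. The file name and payload are invented purely for illustration, and try-with-resources is assumed so the buffer is flushed and closed automatically.

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class BufferedOutputStreamExample {
    public static void main(String[] args) throws IOException {
        // Wrapping the FileOutputStream buffers small writes in memory and
        // writes them to disk in larger chunks; "example.txt" is a made-up path.
        try (BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream("example.txt"))) {
            out.write("hello buffered world".getBytes(StandardCharsets.UTF_8));
            // Closing (here via try-with-resources) flushes any bytes still in the buffer.
        }
    }
}

The examples that follow apply the same constructor to in-memory streams, files, sockets, and HDFS output streams.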

Usage

From source file:com.tecapro.inventory.common.util.CommonUtil.java

/**
 * Serializes an object and returns its Base64-encoded byte representation.
 *
 * @param obj the object to serialize
 * @return the serialized object, encoded as Base64 bytes
 * @throws Exception if serialization fails
 */
public byte[] serialize(Object obj) throws Exception {
    ByteArrayOutputStream byteStream = null;
    ObjectOutputStream ostream = null;
    byte[] result = null;

    try {
        byteStream = new ByteArrayOutputStream();
        ostream = new ObjectOutputStream(new BufferedOutputStream(byteStream));
        ostream.writeObject(obj);
        ostream.flush();
        result = Base64.encodeBase64(byteStream.toByteArray());
    } finally {
        // Close the object stream first so any buffered bytes are flushed into byteStream.
        if (ostream != null) {
            ostream.close();
        }
        if (byteStream != null) {
            byteStream.close();
        }
    }
    return result;
}

From source file:com.sshdemo.common.report.manage.service.RepositoryService.java

@Override
public void publishRepository(List<Long> repositoryIds) {
    for (Long repositoryId : repositoryIds) {
        Repository repository = findRepositoryById(repositoryId);
        String type = repository.getType();
        byte[] bytes = repository.getEntity();
        String outputFile = repository.getName() + "." + type;

        //         Resource.Type resourceType = Resource.Type.ANNEX;
        //         if (type.toLowerCase().equals("png")) {
        //            resourceType = Resource.Type.IMAGE;
        //         }

        File file = null;
        FileOutputStream fileStream = null;
        BufferedOutputStream bufferStream = null;
        try {
            file = new File(outputFile);
            fileStream = new FileOutputStream(file);
            bufferStream = new BufferedOutputStream(fileStream);
            bufferStream.write(bytes);
            //TODO ??

            //            resourceService.upload(site, file, outputFile, resourceType);

            repository.setPublishDate(new Date(Calendar.getInstance().getTime().getTime()));
            repositoryDAO.merge(repository);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Close the buffered stream first so pending bytes are flushed
            // before the underlying file stream is closed.
            if (bufferStream != null) {
                try {
                    bufferStream.close();
                } catch (IOException e) {
                    // ignore close failure
                }
                bufferStream = null;
            }
            if (fileStream != null) {
                try {
                    fileStream.close();
                } catch (IOException e) {
                    // ignore close failure
                }
                fileStream = null;
            }
            file = null;
        }
    }
}

From source file:com.digitalpebble.stormcrawler.elasticsearch.util.URLExtractor.java

URLExtractor(Map stormConf, String outfile, String boltType)
        throws FileNotFoundException, UnknownHostException {

    this.output = new BufferedOutputStream(new FileOutputStream(new File(outfile)));

    this.boltType = boltType;

    this.client = ElasticSearchConnection.getClient(stormConf, boltType);

    this.indexName = ConfUtils.getString(stormConf, "es." + boltType + ".index.name", "status");

    this.docType = ConfUtils.getString(stormConf, "es." + boltType + ".doc.type", "status");
}

From source file:jhttpp2.Jhttpp2HTTPSession.java

public Jhttpp2HTTPSession(Jhttpp2Server server, Socket client) {
    try {
        in = new Jhttpp2ClientInputStream(server, this, client.getInputStream());// ,true);
        out = new BufferedOutputStream(client.getOutputStream());
        this.server = server;
        this.client = client;
    } catch (IOException e_io) {
        try {
            client.close();
        } catch (IOException e_io2) {
            log.debug("Error while closing client (kinda expected): " + e_io2);
        }
        log.warn("Error while creating IO-Streams: ", e_io);
        return;
    }
    start();
}

From source file:gaffer.accumulo.utils.IngestUtils.java

/**
 * Given some split points, write a Base64 encoded splits file.
 *
 * @param splits  The split points
 * @param fs  The FileSystem in which to create the splits file
 * @param splitsFile  The location of the output splits file
 * @throws IOException
 */
public static void writeSplitsFile(Collection<Text> splits, FileSystem fs, Path splitsFile) throws IOException {
    PrintStream out = null;
    try {
        out = new PrintStream(new BufferedOutputStream(fs.create(splitsFile, true)));
        for (Text split : splits) {
            out.println(new String(Base64.encodeBase64(split.getBytes())));
        }
    } finally {
        IOUtils.closeStream(out);
    }
}

From source file:eu.eubrazilcc.lvl.core.io.FileCompressor.java

/**
 * Creates a gzip-compressed tarball from the source directory and writes it to the target path.
 * @param srcDir - directory whose files will be added to the tarball
 * @param targetName - path of the tarball file that will be created
 * @throws IOException when an exception occurs on creating the tarball
 */
public static void tarGzipDir(final String srcDir, final String targetName) throws IOException {

    FileOutputStream fileOutputStream = null;
    BufferedOutputStream bufferedOutputStream = null;
    GzipCompressorOutputStream gzipOutputStream = null;
    TarArchiveOutputStream tarArchiveOutputStream = null;

    try {
        forceMkdir(new File(getFullPath(targetName)));
        fileOutputStream = new FileOutputStream(new File(targetName));
        bufferedOutputStream = new BufferedOutputStream(fileOutputStream);
        gzipOutputStream = new GzipCompressorOutputStream(bufferedOutputStream);
        tarArchiveOutputStream = new TarArchiveOutputStream(gzipOutputStream);

        addFilesInDirectory(tarArchiveOutputStream, srcDir);
    } finally {
        if (tarArchiveOutputStream != null) {
            // finish() writes the end-of-archive records, then close() releases the stream.
            tarArchiveOutputStream.finish();
            tarArchiveOutputStream.close();
        }
        if (gzipOutputStream != null) {
            gzipOutputStream.close();
        }
        if (bufferedOutputStream != null) {
            bufferedOutputStream.close();
        }
        if (fileOutputStream != null) {
            fileOutputStream.close();
        }
    }
}

From source file:com.cloudhopper.commons.io.demo.FileServerMain.java

private static void saveFileFromUrl(URL url, String path) throws Exception {
    URLConnection urlc = url.openConnection();
    BufferedInputStream bis = new BufferedInputStream(urlc.getInputStream());
    BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(new File(path)));
    int i;
    while ((i = bis.read()) != -1) {
        bos.write(i);
    }
    bis.close();
    bos.close();
}

From source file:org.zols.documents.service.DocumentService.java

/**
 * Upload documents
 *
 * @param documentRepositoryName name of the repository
 * @param upload documents to be uploaded
 * @param rootFolderPath source path of the document
 */
public void upload(String documentRepositoryName, Upload upload, String rootFolderPath)
        throws DataStoreException {
    DocumentRepository documentRepository = documentRepositoryService.read(documentRepositoryName);
    String folderPath = documentRepository.getPath();
    if (rootFolderPath != null && rootFolderPath.trim().length() != 0) {
        folderPath = folderPath + File.separator + rootFolderPath;
    }
    List<MultipartFile> multipartFiles = upload.getFiles();
    if (null != multipartFiles && multipartFiles.size() > 0) {
        for (MultipartFile multipartFile : multipartFiles) {
            //Handle file content - multipartFile.getInputStream()
            byte[] bytes;
            try {
                bytes = multipartFile.getBytes();

                BufferedOutputStream stream = new BufferedOutputStream(new FileOutputStream(
                        new File(folderPath + File.separator + multipartFile.getOriginalFilename())));
                stream.write(bytes);
                stream.close();
            } catch (IOException ex) {
                java.util.logging.Logger.getLogger(DocumentService.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }
}

From source file:azkaban.jobtype.ReportalHiveRunner.java

@Override
protected void runReportal() throws Exception {
    System.out.println("Reportal Hive: Setting up Hive");
    HiveConf conf = new HiveConf(SessionState.class);

    if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
        conf.set("mapreduce.job.credentials.binary", System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
    }

    File tempTSVFile = new File("./temp.tsv");
    OutputStream tsvTempOutputStream = new BoundedOutputStream(
            new BufferedOutputStream(new FileOutputStream(tempTSVFile)), outputCapacity);
    PrintStream logOut = System.out;

    // NOTE: It is critical to do this here so that log4j is reinitialized
    // before any of the other core hive classes are loaded
    // criccomini@linkedin.com: I disabled this because it appears to swallow
    // all future logging (even outside of hive).
    // SessionState.initHiveLog4j();

    String orig = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);

    CliSessionState sessionState = new CliSessionState(conf);
    sessionState.in = System.in;
    sessionState.out = new PrintStream(tsvTempOutputStream, true, "UTF-8");
    sessionState.err = new PrintStream(logOut, true, "UTF-8");

    OptionsProcessor oproc = new OptionsProcessor();

    // Feed in Hive Args
    String[] args = buildHiveArgs();
    if (!oproc.process_stage1(args)) {
        throw new Exception("unable to parse options stage 1");
    }

    if (!oproc.process_stage2(sessionState)) {
        throw new Exception("unable to parse options stage 2");
    }

    // Set all properties specified via command line
    for (Map.Entry<Object, Object> item : sessionState.cmdProperties.entrySet()) {
        conf.set((String) item.getKey(), (String) item.getValue());
    }

    SessionState.start(sessionState);

    String expanded = expandHiveAuxJarsPath(orig);
    if (orig == null || orig.equals(expanded)) {
        System.out.println("Hive aux jars variable not expanded");
    } else {
        System.out.println("Expanded aux jars variable from [" + orig + "] to [" + expanded + "]");
        HiveConf.setVar(conf, HiveConf.ConfVars.HIVEAUXJARS, expanded);
    }

    if (!ShimLoader.getHadoopShims().usesJobShell()) {
        // hadoop-20 and above - we need to augment classpath using hiveconf
        // components
        // see also: code in ExecDriver.java
        ClassLoader loader = conf.getClassLoader();
        String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);

        System.out.println("Got auxJars = " + auxJars);

        if (StringUtils.isNotBlank(auxJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        }
        conf.setClassLoader(loader);
        Thread.currentThread().setContextClassLoader(loader);
    }

    CliDriver cli = new CliDriver();
    int returnValue = 0;
    String prefix = "";

    returnValue = cli.processLine("set hive.cli.print.header=true;");
    String[] queries = jobQuery.split("\n");
    for (String line : queries) {
        if (!prefix.isEmpty()) {
            prefix += '\n';
        }
        if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
            line = prefix + line;
            line = injectVariables(line);
            System.out.println("Reportal Hive: Running Hive Query: " + line);
            System.out.println("Reportal Hive: HiveConf HIVEAUXJARS: "
                    + HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS));
            returnValue = cli.processLine(line);
            prefix = "";
        } else {
            prefix = prefix + line;
            continue;
        }
    }

    tsvTempOutputStream.close();

    // convert tsv to csv and write it to disk
    System.out.println("Reportal Hive: Converting output");
    InputStream tsvTempInputStream = new BufferedInputStream(new FileInputStream(tempTSVFile));
    Scanner rowScanner = new Scanner(tsvTempInputStream);
    PrintStream csvOutputStream = new PrintStream(outputStream);
    while (rowScanner.hasNextLine()) {
        String tsvLine = rowScanner.nextLine();
        // strip all quotes, and then quote the columns
        csvOutputStream.println("\"" + tsvLine.replace("\"", "").replace("\t", "\",\"") + "\"");
    }
    rowScanner.close();
    csvOutputStream.close();

    // Delete the temp file
    tempTSVFile.delete();

    if (returnValue != 0) {
        throw new Exception("Hive query finished with a non zero return code");
    }

    System.out.println("Reportal Hive: Ended successfully");
}

From source file:app.service.ResourceService.java

public int upload(MultipartFile file, int resourceType, StringResource resource) {
    if (file.isEmpty()) {
        return ResultCode.FILE_EMPTY;
    }

    String path;
    if (resourceType == RESOURCE_TYPE_AVATAR) {
        path = RESOURCE_AVATAR_PATH;
    } else if (resourceType == RESOURCE_TYPE_COMMON) {
        path = RESOURCE_COMMON_PATH;
    } else {
        return ResultCode.UNKNOWN_RESOURCE;
    }

    resolvePath(path);
    String filename = resolveFilename(file.getOriginalFilename());
    try {
        OutputStream out = new FileOutputStream(new File(path + "/" + filename));
        BufferedOutputStream stream = new BufferedOutputStream(out);
        FileCopyUtils.copy(file.getInputStream(), stream);
        stream.close();
        resource.filename = filename;
    } catch (Exception e) {
        logger.warn("upload file failure", e);
        return ResultCode.UPLOAD_FILE_FAILED;
    }
    return BaseResponse.COMMON_SUCCESS;
}