Example usage for java.io BufferedOutputStream BufferedOutputStream

List of usage examples for java.io BufferedOutputStream BufferedOutputStream

Introduction

On this page you can find example usages of the java.io.BufferedOutputStream(OutputStream, int) constructor.

Prototype

public BufferedOutputStream(OutputStream out, int size) 

Source Link

Document

Creates a new buffered output stream to write data to the specified underlying output stream with the specified buffer size.

Usage

From source file:Main.java

/**
 * Returns the given pid plus every pid whose parent (PPID) equals it,
 * as reported by running 'ps' inside a root ("su") shell.
 *
 * Returns a list containing at least {@code pid} itself; on I/O failure
 * the partial result collected so far is returned.
 */
private static List<Integer> getAllRelatedPids(final int pid) {
    List<Integer> result = new ArrayList<Integer>(Arrays.asList(pid));
    // use 'ps' to get this pid and all pids that are related to it (e.g.
    // spawned by it)
    try {
        final Process suProcess = Runtime.getRuntime().exec("su");

        // Write the commands to the shell's stdin. The original wrapped this
        // in a Thread but called run() (synchronous, never started); the
        // synchronous behavior is what we want, so do it inline.
        PrintStream shellIn = null;
        try {
            shellIn = new PrintStream(new BufferedOutputStream(suProcess.getOutputStream(), 8192));
            shellIn.println("ps");
            shellIn.println("exit");
            shellIn.flush();
        } finally {
            if (shellIn != null) {
                shellIn.close();
            }
        }

        try {
            suProcess.waitFor();
        } catch (InterruptedException e) {
            // Re-assert the interrupt flag rather than swallowing it.
            Thread.currentThread().interrupt();
        }

        BufferedReader bufferedReader = null;
        try {
            bufferedReader = new BufferedReader(new InputStreamReader(suProcess.getInputStream()), 8192);
            // Read until EOF: ready() only reports already-buffered data and
            // can terminate the loop early, so loop on readLine() instead.
            // The process has exited (waitFor above), so EOF is guaranteed.
            String rawLine;
            while ((rawLine = bufferedReader.readLine()) != null) {
                String[] line = SPACES_PATTERN.split(rawLine);
                // 'ps' output columns: USER PID PPID ... — line[1] is the
                // PID, line[2] the parent PID. TODO confirm on target ROM.
                if (line.length >= 3) {
                    try {
                        if (pid == Integer.parseInt(line[2])) {
                            result.add(Integer.parseInt(line[1]));
                        }
                    } catch (NumberFormatException ignored) {
                        // Header row or non-numeric columns — skip.
                    }
                }
            }
        } finally {
            if (bufferedReader != null) {
                bufferedReader.close();
            }
        }
    } catch (IOException e1) {
        e1.printStackTrace();
    }

    return result;
}

From source file:com.ibm.research.rdf.store.cmd.GeneratePredicateMappings.java

@Override
public void doWork(Connection conn) {
    // TODO Auto-generated method stub
    try {/*from   w  ww . j  a va  2s .co  m*/

        PrintStream ps = new PrintStream(new BufferedOutputStream(System.out, 1000000));

        StoreManager.generatePredicateMappings(conn, Backend.valueOf(params.get("-backend")),
                params.get("-schema"), storeName, ps, Context.defaultContext);
        ps.close();

    } catch (RdfStoreException e) {
        log.error(e);
        System.out.println(e.getLocalizedMessage());
    } catch (Exception e) {
        log.error(e);
        System.out.println(e.getLocalizedMessage());
    }
}

From source file:com.aliyun.odps.ship.download.TextRecordWriter.java

/**
 * Opens a counting, buffered writer over the target file and stores the
 * field/record delimiter bytes.
 *
 * @param file output file to create/truncate
 * @param fd   field delimiter
 * @param rd   record delimiter
 * @throws FileNotFoundException if the file cannot be opened for writing
 */
public TextRecordWriter(File file, String fd, String rd) throws FileNotFoundException {

    this.os = new CountingOutputStream(new BufferedOutputStream(new FileOutputStream(file), BUFFER_SIZE));
    // Encode delimiters with an explicit charset: the no-arg getBytes()
    // used the platform default and could corrupt multi-byte delimiters
    // across environments.
    this.fd = fd.getBytes(java.nio.charset.StandardCharsets.UTF_8);
    this.rd = rd.getBytes(java.nio.charset.StandardCharsets.UTF_8);
}

From source file:Main.java

/**
 * Extract a zip resource into real files and directories
 * /*w  w  w . j a va2s . c  o  m*/
 * @param in typically given as getResources().openRawResource(R.raw.something)
 * @param directory target directory
 * @param overwrite indicates whether to overwrite existing files
 * @return list of files that were unpacked (if overwrite is false, this list won't include files
 *         that existed before)
 * @throws IOException
 */
public static List<File> extractZipResource(InputStream in, File directory, boolean overwrite)
        throws IOException {
    final int BUFSIZE = 2048;
    byte buffer[] = new byte[BUFSIZE];
    ZipInputStream zin = new ZipInputStream(new BufferedInputStream(in, BUFSIZE));
    List<File> files = new ArrayList<File>();
    ZipEntry entry;
    directory.mkdirs();
    while ((entry = zin.getNextEntry()) != null) {
        File file = new File(directory, entry.getName());
        files.add(file);
        if (overwrite || !file.exists()) {
            if (entry.isDirectory()) {
                file.mkdirs();
            } else {
                file.getParentFile().mkdirs(); // Necessary because some zip files lack directory entries.
                BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(file), BUFSIZE);
                int nRead;
                while ((nRead = zin.read(buffer, 0, BUFSIZE)) > 0) {
                    bos.write(buffer, 0, nRead);
                }
                bos.flush();
                bos.close();
            }
        }
    }
    zin.close();
    return files;
}

From source file:de.suse.swamp.util.FileUtils.java

/**
 * Decompress the provided zip stream into "targetPath".
 *
 * @param inputFile  stream of zip data to decompress
 * @param targetPath directory the entries are written under
 * @throws Exception on any I/O failure
 */
public static void uncompress(InputStream inputFile, String targetPath) throws Exception {
    byte[] data = new byte[2048];
    // try-with-resources closes the zip stream and each output file even when
    // a copy fails mid-entry; the original never closed 'zin' at all and
    // leaked 'dest' on exception.
    try (ZipInputStream zin = new ZipInputStream(new BufferedInputStream(inputFile))) {
        ZipEntry entry;
        while ((entry = zin.getNextEntry()) != null) {
            // write the files to the disk
            if (entry.isDirectory()) {
                // Stdlib replacement for commons-io forceMkdir: it likewise
                // throws if the directory cannot be created.
                java.nio.file.Files.createDirectories(
                        new File(targetPath + "/" + entry.getName()).toPath());
            } else {
                try (BufferedOutputStream dest = new BufferedOutputStream(
                        new FileOutputStream(targetPath + "/" + entry.getName()), 2048)) {
                    int count;
                    while ((count = zin.read(data, 0, 2048)) != -1) {
                        dest.write(data, 0, count);
                    }
                }
            }
        }
    }
}

From source file:net.gleamynode.oil.impl.wal.store.FileLogWriter.java

// Wires up the writer's two output paths: a buffered stream over the raw log
// sink, and a compact object serializer into the staging buffer 'buf'
// (a field declared elsewhere in this class).
public FileLogWriter(ClassCatalog catalog, OutputStream out, int maxLogSize) throws IOException {
    // Buffer the raw log output so each flush becomes one large write.
    this.out = new BufferedOutputStream(out, FileLogStoreConstants.DEFAULT_IO_BUFFER_SIZE);
    this.bufOut = new CompactObjectOutputStream(catalog, buf);
    this.maxLogSize = maxLogSize; // presumably a rollover threshold in bytes — TODO confirm
}

From source file:c3.ops.priam.compress.SnappyCompression.java

// Stream-decompresses Snappy-encoded 'input' into 'output' in BUFFER-sized
// chunks. The caller-supplied streams are closed (quietly) on exit.
private void decompress(InputStream input, OutputStream output) throws IOException {
    SnappyInputStream snappyIn = new SnappyInputStream(new BufferedInputStream(input));
    byte[] chunk = new byte[BUFFER];
    BufferedOutputStream bufferedOut = new BufferedOutputStream(output, BUFFER);
    try {
        for (int read = snappyIn.read(chunk, 0, BUFFER); read != -1;
                read = snappyIn.read(chunk, 0, BUFFER)) {
            bufferedOut.write(chunk, 0, read);
        }
    } finally {
        // Close the sink first (flushes buffered bytes), then the source;
        // close failures are suppressed.
        IOUtils.closeQuietly(bufferedOut);
        IOUtils.closeQuietly(snappyIn);
    }
}

From source file:Main.java

/**
 * Downloads the bytes at {@code url} and decodes them into a Bitmap.
 *
 * @param url source URL of the image bytes
 * @param opt decode options passed through to BitmapFactory
 * @return the decoded bitmap, or null if the download or decode failed
 */
public static Bitmap loadBitmap(String url, BitmapFactory.Options opt) {
    Bitmap bitmap = null;
    InputStream is = null;
    try {
        is = new BufferedInputStream(new URL(url).openStream(), IO_BUFFER_SIZE);
        // Copy straight into the byte-array sink; the BufferedOutputStream
        // the original wrapped around ByteArrayOutputStream added no benefit
        // (the sink is already in-memory).
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        copyStream(is, baos);
        final byte[] data = baos.toByteArray();
        bitmap = BitmapFactory.decodeByteArray(data, 0, data.length, opt);
    } catch (IOException e) {
        // Log the throwable itself instead of printStackTrace().
        Log.e(TAG, "IOException in loadBitmap", e);
    } catch (OutOfMemoryError e) {
        Log.e(TAG, "OutOfMemoryError in loadBitmap", e);
    } finally {
        closeStream(is);
    }
    return bitmap;
}

From source file:com.ibm.research.rdf.store.cmd.DumpRdfStore.java

@Override
public void doWork(Connection conn) {

    // create the store.
    try {/*from   ww  w. jav  a  2s . co  m*/

        Store store = StoreManager.connectStore(conn, Backend.valueOf(params.get("-backend")),
                params.get("-schema"), storeName, Context.defaultContext);
        Dataset ds = RdfStoreFactory.connectDataset(store, conn, Backend.valueOf(params.get("-backend")));

        PrintStream ps = new PrintStream(new BufferedOutputStream(System.out, 1000000));
        RiotWriter.writeNQuads(ps, ds.asDatasetGraph());
        ps.close();
    } catch (RdfStoreException e) {
        log.error(e);
        System.out.println(e.getLocalizedMessage());
    } catch (Exception e) {
        log.error(e);
        System.out.println(e.getLocalizedMessage());
    }
}

From source file:io.ecarf.core.cloud.task.processor.reason.phase2.ReasonUtils.java

/**
 * Reasons over one file of instance triples against the schema-term map,
 * writing inferred triples to a gzip-compressed CSV output file.
 *
 * @param inFile input CSV of instance triples (query results)
 * @param outFile target file for the gzip-compressed inferred triples
 * @param compressed whether the input carries a header row to skip
 * @param schemaTerms schema triples keyed by term id
 * @param productiveTerms output set collecting term ids that produced inferences
 * @return the number of inferred triples written
 * @throws IOException on read/write failure
 */
public static int reason(String inFile, String outFile, boolean compressed, Map<Long, Set<Triple>> schemaTerms,
        Set<Long> productiveTerms) throws IOException {

    log.info("Reasoning for file: " + inFile + ", memory usage: " + Utils.getMemoryUsageInGB() + "GB");

    int inferredTriples = 0;

    // loop through the instance triples probably stored in a file and generate all the triples matching the schema triples set
    // Writer chain: gzip the inferred triples, buffered on both sides of the
    // gzip stream. NOTE(review): PrintWriter swallows I/O errors silently
    // (checkError() is never consulted) — confirm this is acceptable.
    try (BufferedReader reader = getQueryResultsReader(inFile, compressed);
            PrintWriter writer = new PrintWriter(new BufferedOutputStream(
                    new GZIPOutputStream(new FileOutputStream(outFile), Constants.GZIP_BUF_SIZE),
                    Constants.GZIP_BUF_SIZE));) {

        Iterable<CSVRecord> records;

        if (compressed) {
            // ignore first row subject,predicate,object
            records = CSVFormat.DEFAULT.withHeader().withSkipHeaderRecord().parse(reader);

        } else {
            records = CSVFormat.DEFAULT.parse(reader);
        }

        Long term;

        for (CSVRecord record : records) {

            ETriple instanceTriple = ETriple.fromCSV(record.values());

            // TODO review for OWL ruleset
            // rdf:type triples are matched by their object (the class id);
            // all other triples are matched by their predicate id.
            if (SchemaURIType.RDF_TYPE.id == instanceTriple.getPredicate()) {

                term = instanceTriple.getObject(); // object

            } else {

                term = instanceTriple.getPredicate(); // predicate
            }

            Set<Triple> schemaTriples = schemaTerms.get(term);

            if ((schemaTriples != null) && !schemaTriples.isEmpty()) {
                // Term matched at least one schema triple: record it as
                // productive for the caller via the output parameter.
                productiveTerms.add(term);

                for (Triple schemaTriple : schemaTriples) {
                    Rule rule = GenericRule.getRule(schemaTriple);
                    Triple inferredTriple = rule.head(schemaTriple, instanceTriple);

                    // rule.head may yield null (no inference for this pair);
                    // only count and emit real inferences.
                    if (inferredTriple != null) {
                        writer.println(inferredTriple.toCsv());
                        inferredTriples++;
                    }
                }
            }

        }

    }

    return inferredTriples;
}