Example usage for com.google.common.io ByteStreams copy

List of usage examples for com.google.common.io ByteStreams copy

Introduction

On this page you can find example usage of com.google.common.io ByteStreams copy.

Prototype

public static long copy(ReadableByteChannel from, WritableByteChannel to) throws IOException 

Source Link

Document

Copies all bytes from the readable channel to the writable channel.

Usage

From source file:co.cask.common.http.HttpRequests.java

/**
 * Executes an HTTP request to the url provided and buffers the entire response body in memory.
 *
 * @param request HTTP request to execute
 * @param requestConfig configuration for the request: timeouts, SSL-verification toggle,
 *                      and the fixed-length streaming threshold
 * @return HTTP response containing status code, message, body bytes, and headers
 * @throws IOException if connecting or transferring data fails
 */
public static HttpResponse execute(HttpRequest request, HttpRequestConfig requestConfig) throws IOException {
    String requestMethod = request.getMethod().name();
    URL url = request.getURL();

    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod(requestMethod);
    conn.setReadTimeout(requestConfig.getReadTimeout());
    conn.setConnectTimeout(requestConfig.getConnectTimeout());

    Multimap<String, String> headers = request.getHeaders();
    if (headers != null && !headers.isEmpty()) {
        for (Map.Entry<String, String> header : headers.entries()) {
            conn.setRequestProperty(header.getKey(), header.getValue());
        }
    }

    InputSupplier<? extends InputStream> bodySrc = request.getBody();
    if (bodySrc != null) {
        conn.setDoOutput(true);
        Long bodyLength = request.getBodyLength();
        if (bodyLength != null) {
            // The int overload is used (rather than setFixedLengthStreamingMode(long)) to keep
            // Java 1.6 compatibility; NOTE(review): intValue() truncates bodies larger than 2 GiB.
            if (bodyLength > requestConfig.getFixedLengthStreamingThreshold()) {
                conn.setFixedLengthStreamingMode(bodyLength.intValue());
            }
        } else {
            // Unknown body length: stream in chunks so the body is not buffered in memory.
            conn.setChunkedStreamingMode(0);
        }
    }

    if (conn instanceof HttpsURLConnection && !requestConfig.isVerifySSLCert()) {
        // Certificate checks are disabled for HTTPS connection. Failure to disable is logged
        // but not fatal: the request proceeds with default certificate checking.
        LOG.debug("Disabling SSL certificate check for {}", request.getURL());
        try {
            disableCertCheck((HttpsURLConnection) conn);
        } catch (Exception e) {
            LOG.error("Got exception while disabling SSL certificate check for {}", request.getURL());
        }
    }

    conn.connect();

    try {
        if (bodySrc != null) {
            OutputStream os = conn.getOutputStream();
            try {
                ByteStreams.copy(bodySrc, os);
            } finally {
                os.close();
            }
        }

        try {
            if (isSuccessful(conn.getResponseCode())) {
                return new HttpResponse(conn.getResponseCode(), conn.getResponseMessage(),
                        ByteStreams.toByteArray(conn.getInputStream()), conn.getHeaderFields());
            }
        } catch (FileNotFoundException e) {
            // getInputStream() throws FileNotFoundException on 404; fall through to the
            // error-stream handling below. Intentionally empty catch block.
        }

        // Non 2xx response: read the error body if one exists, otherwise return empty content.
        InputStream es = conn.getErrorStream();
        byte[] content = (es == null) ? new byte[0] : ByteStreams.toByteArray(es);
        return new HttpResponse(conn.getResponseCode(), conn.getResponseMessage(), content,
                conn.getHeaderFields());
    } finally {
        conn.disconnect();
    }
}

From source file:org.obiba.mica.file.service.TempFileService.java

/**
 * Persists a temp-file record and writes the uploaded content to its backing file,
 * recording the resulting size and MD5 checksum.
 *
 * @param tempFile metadata record; if it carries an id of an existing record, that record is reused
 * @param uploadedInputStream content to write; consumed fully, not closed by this method
 * @return the saved record with size and md5 populated
 * @throws IOException if writing or hashing the file fails
 */
@NotNull
public TempFile addTempFile(@NotNull TempFile tempFile, @NotNull InputStream uploadedInputStream)
        throws IOException {
    TempFile savedTempFile;
    if (tempFile.getId() != null) {
        // Reuse an existing persisted record when one matches the supplied id.
        savedTempFile = tempFileRepository.findOne(tempFile.getId());
        if (savedTempFile == null) {
            savedTempFile = tempFileRepository.save(tempFile);
        }
    } else {
        savedTempFile = tempFileRepository.save(tempFile);
    }

    File file = getFile(savedTempFile.getId());
    // try-with-resources: the original leaked the FileOutputStream if the copy threw.
    try (OutputStream fileOut = new FileOutputStream(file)) {
        ByteStreams.copy(uploadedInputStream, fileOut);
    }
    savedTempFile.setSize(file.length());
    savedTempFile.setMd5(Files.hash(file, Hashing.md5()).toString());
    tempFileRepository.save(savedTempFile);
    return savedTempFile;
}

From source file:io.druid.segment.data.EntireLayoutDoubleSupplierSerializer.java

@Override
public void writeToChannel(WritableByteChannel channel, FileSmoosher smoosher) throws IOException {
    // Stream the metadata section first, then the serialized values, into the target channel.
    // Both input streams are closed by try-with-resources regardless of copy failures.
    try (InputStream metaStream = ioPeon.makeInputStream(metaFile);
            InputStream valueStream = ioPeon.makeInputStream(valueFile)) {
        ByteStreams.copy(Channels.newChannel(metaStream), channel);
        ByteStreams.copy(Channels.newChannel(valueStream), channel);
    }
}

From source file:org.diqube.itest.util.Zip.java

/**
 * Create a .zip file of all the contents of the source directory.
 *
 * <p>Entry names are relative to the source directory, so the archive contains no
 * absolute paths. An existing target file is deleted first; unreadable files are
 * logged and skipped rather than failing the whole archive.
 *
 * @param sourceDirectory directory whose contents are archived; must exist and be a directory
 * @param targetFile destination .zip file; must not be a directory
 * @throws RuntimeException if validation fails or an I/O error occurs while zipping
 */
public void zip(File sourceDirectory, File targetFile) {
    if (!sourceDirectory.exists() || !sourceDirectory.isDirectory())
        throw new RuntimeException(
                "Source directory is no valid directory: " + sourceDirectory.getAbsolutePath());

    if (targetFile.exists() && targetFile.isDirectory())
        throw new RuntimeException("Target file exists and is a directory: " + targetFile.getAbsolutePath());

    if (targetFile.exists())
        if (!targetFile.delete())
            throw new RuntimeException(
                    "Target file exists but could not be deleted: " + targetFile.getAbsolutePath());

    Path sourcePath = sourceDirectory.toPath();
    try (OutputStream os = new BufferedOutputStream(new FileOutputStream(targetFile));
            ZipOutputStream zos = new ZipOutputStream(os)) {
        Files.walkFileTree(sourcePath, new FileVisitor<Path>() {
            @Override
            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)
                    throws IOException {
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                // Relativize so the entry name is portable (no absolute path leakage).
                String name = sourcePath.relativize(file).toString();

                zos.putNextEntry(new ZipEntry(name));
                // Files.copy streams the file into the zip entry directly, replacing the
                // manually-managed FileInputStream + ByteStreams.copy of the original.
                Files.copy(file, zos);
                zos.closeEntry();

                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
                // Best-effort: log the failure and keep zipping the remaining files.
                logger.warn("Visit file failed: {}", file, exc);
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                return FileVisitResult.CONTINUE;
            }
        });
    } catch (IOException e) {
        throw new RuntimeException("Could not zip result", e);
    }
}

From source file:org.jclouds.examples.rackspace.cloudfiles.GetObject.java

/**
 * Downloads the object's payload into a new temp file and prints the file's path.
 *
 * @param swiftObject object whose payload is written to disk
 * @throws IOException if opening the payload or writing the file fails
 */
private void writeObject(SwiftObject swiftObject) throws IOException {
    System.out.format("Write Object%n");

    File file = File.createTempFile("uploadObjectFromFile", ".txt");

    // try-with-resources closes both streams even if the copy or a close fails; the
    // original finally block leaked the output stream when inputStream.close() threw.
    try (InputStream inputStream = swiftObject.getPayload().openStream();
            OutputStream outputStream = new BufferedOutputStream(new FileOutputStream(file))) {
        ByteStreams.copy(inputStream, outputStream);
    }

    System.out.format("  %s%n", file.getAbsolutePath());
}

From source file:org.jclouds.glacier.util.TreeHash.java

/**
 * Builds the Hash and the TreeHash values of the payload.
 *
 * @param payload the payload to hash; its stream is consumed fully and closed quietly
 * @return The calculated TreeHash.
 * @throws IOException if reading the payload stream fails
 * @see <a href="http://docs.aws.amazon.com/amazonglacier/latest/dev/checksum-calculations.html" />
 */
public static TreeHash buildTreeHashFromPayload(Payload payload) throws IOException {
    InputStream is = null;
    try {
        is = checkNotNull(payload, "payload").openStream();
        Builder<HashCode> list = ImmutableList.builder();
        // linearHis hashes every byte that passes through it, producing the whole-payload hash.
        HashingInputStream linearHis = new HashingInputStream(Hashing.sha256(), is);
        while (true) {
            // Hash the next chunk of at most CHUNK_SIZE bytes. Reading the limited view
            // also advances linearHis, so both hashes are computed in a single pass.
            HashingInputStream chunkedHis = new HashingInputStream(Hashing.sha256(),
                    ByteStreams.limit(linearHis, CHUNK_SIZE));
            long count = ByteStreams.copy(chunkedHis, ByteStreams.nullOutputStream());
            if (count == 0) {
                // End of stream: no bytes were available for this chunk.
                break;
            }
            list.add(chunkedHis.hash());
        }
        // list holds one SHA-256 per chunk; hashList presumably folds them into the tree-hash
        // root. NOTE(review): the original comment claimed the list has "exactly one element",
        // which only holds for payloads of at most CHUNK_SIZE bytes — verify against hashList.
        return new TreeHash(hashList(list.build()), linearHis.hash());
    } finally {
        closeQuietly(is);
    }
}

From source file:org.zalando.logbook.servlet.example.ExampleController.java

@RequestMapping(value = "/stream", produces = MediaType.TEXT_PLAIN_VALUE)
public void stream(final HttpServletRequest request, final HttpServletResponse response) throws IOException {
    // Echo endpoint: pipe the request body straight through to the response body.
    // The servlet container owns both streams, so they are intentionally not closed here.
    ByteStreams.copy(request.getInputStream(), response.getOutputStream());
}

From source file:co.cask.cdap.test.internal.DefaultProcedureClient.java

/**
 * Invokes a procedure method over HTTP with the given arguments and returns the raw
 * response bytes.
 *
 * @param method name of the procedure method to invoke
 * @param arguments arguments serialized as a JSON object in the request body
 * @return the raw response body
 * @throws IOException if the request fails or the server responds with a non-200 status
 */
@Override
public byte[] queryRaw(String method, Map<String, String> arguments) throws IOException {
    // Locate a running instance of the procedure via service discovery.
    Discoverable discoverable = discoveryServiceClient
            .discover(String.format("procedure.%s.%s.%s", accountId, applicationId, procedureName)).iterator()
            .next();

    URL url = new URL(String.format("http://%s:%d/apps/%s/procedures/%s/methods/%s",
            discoverable.getSocketAddress().getHostName(), discoverable.getSocketAddress().getPort(),
            applicationId, procedureName, method));
    HttpURLConnection urlConn = (HttpURLConnection) url.openConnection();
    try {
        urlConn.setDoOutput(true);
        // Write the arguments map as a UTF-8 JSON request body.
        JsonWriter writer = new JsonWriter(new OutputStreamWriter(urlConn.getOutputStream(), Charsets.UTF_8));
        try {
            new Gson().toJson(arguments, new TypeToken<Map<String, String>>() {
            }.getType(), writer);
        } finally {
            writer.close();
        }
        if (urlConn.getResponseCode() != 200) {
            throw new IOException("Response code != 200 (responded = " + urlConn.getResponseCode() + " "
                    + urlConn.getResponseMessage() + ")");
        }
        // toByteArray replaces the manual ByteArrayOutputStream + copy of the original.
        return ByteStreams.toByteArray(urlConn.getInputStream());
    } finally {
        // Release the connection on all paths; the original never disconnected.
        urlConn.disconnect();
    }
}

From source file:co.cask.cdap.common.test.PluginJarHelper.java

/**
 * Builds a deployable plugin jar: bundles the given classes (and their packages) with
 * the ApplicationBundler, then rewrites the bundle so entries lose their "classes/"
 * prefix and the supplied manifest replaces any manifest already in the bundle.
 *
 * @param locationFactory factory used to create the temporary and final jar locations
 * @param manifest manifest to embed in the deployable jar (MANIFEST_VERSION is forced to 1.0)
 * @param clz primary plugin class; its package is always included in the bundle
 * @param classes additional classes whose packages must also be included
 * @return location of the rewritten, deployable jar
 * @throws IOException if bundling or rewriting the jar fails
 */
public static Location createPluginJar(LocationFactory locationFactory, Manifest manifest, Class<?> clz,
        Class<?>... classes) throws IOException {

    // include all packages from the given plugin classes
    // for example, a plugin may use the org.apache.spark.streaming.kafka.KafkaUtils class,
    // which would otherwise get filtered out by the org.apache.spark package filter.
    Set<String> includePackages = new HashSet<>();
    includePackages.add("org.apache.hadoop.hbase");
    includePackages.add(clz.getPackage().getName());
    for (Class<?> clazz : classes) {
        includePackages.add(clazz.getPackage().getName());
    }

    ApplicationBundler bundler = new ApplicationBundler(
            ImmutableList.of("co.cask.cdap.api", "org.apache.hadoop", "org.apache.hive", "org.apache.spark"),
            includePackages);
    Location jarLocation = locationFactory.create(clz.getName()).getTempFile(".jar");
    // Bundle under the plugin's own classloader so its dependencies are resolvable,
    // restoring the previous context classloader afterwards.
    ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(clz.getClassLoader());
    try {
        bundler.createBundle(jarLocation, clz, classes);
    } finally {
        ClassLoaders.setContextClassLoader(oldClassLoader);
    }

    Location deployJar = locationFactory.create(clz.getName()).getTempFile(".jar");
    Manifest jarManifest = new Manifest(manifest);
    jarManifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");

    // Create the program jar for deployment. It removes the "classes/" prefix as that's the convention taken
    // by the ApplicationBundler inside Twill.
    try (JarOutputStream jarOutput = new JarOutputStream(deployJar.getOutputStream(), jarManifest);
            JarInputStream jarInput = new JarInputStream(jarLocation.getInputStream())) {
        JarEntry jarEntry = jarInput.getNextJarEntry();
        while (jarEntry != null) {
            boolean isDir = jarEntry.isDirectory();
            String entryName = jarEntry.getName();
            // The bare "classes/" directory entry is dropped entirely.
            if (!entryName.equals("classes/")) {
                if (entryName.startsWith("classes/")) {
                    jarEntry = new JarEntry(entryName.substring("classes/".length()));
                } else {
                    jarEntry = new JarEntry(entryName);
                }

                // TODO: this is due to manifest possibly already existing in the jar, but we also
                // create a manifest programatically so it's possible to have a duplicate entry here
                // (continue re-checks the loop condition with the freshly advanced entry).
                if ("META-INF/MANIFEST.MF".equalsIgnoreCase(jarEntry.getName())) {
                    jarEntry = jarInput.getNextJarEntry();
                    continue;
                }

                jarOutput.putNextEntry(jarEntry);
                if (!isDir) {
                    // Directory entries have no content; everything else is copied verbatim.
                    ByteStreams.copy(jarInput, jarOutput);
                }
            }

            jarEntry = jarInput.getNextJarEntry();
        }
    }

    return deployJar;
}

From source file:net.minecraftforge.gradle.user.TaskSingleDeobfBin.java

/**
 * Copies the input jar to the output jar, deobfuscating every .class entry using the
 * method and field name mappings loaded from the configured CSV files; all other
 * entries (resources and directories) are copied through unchanged.
 *
 * @throws IOException if reading the CSVs or copying the jar fails
 */
@TaskAction
public void doTask() throws IOException {
    final Map<String, String> methods = Maps.newHashMap();
    final Map<String, String> fields = Maps.newHashMap();

    // read CSV mapping files: column 0 is the obfuscated name, column 1 the readable name
    CSVReader reader = Constants.getReader(getMethodCsv());
    for (String[] s : reader.readAll()) {
        methods.put(s[0], s[1]);
    }

    reader = Constants.getReader(getFieldCsv());
    for (String[] s : reader.readAll()) {
        fields.put(s[0], s[1]);
    }

    // actually do the jar copy..
    File input = getInJar();
    File output = getOutJar();

    output.getParentFile().mkdirs();

    // try-with-resources: the original leaked both streams if any entry copy threw.
    // Close order (zout first, then zin) matches the original explicit closes.
    try (ZipInputStream zin = new ZipInputStream(new FileInputStream(input));
            JarOutputStream zout = new JarOutputStream(new FileOutputStream(output))) {
        ZipEntry entry;
        while ((entry = zin.getNextEntry()) != null) {
            if (entry.isDirectory() || !entry.getName().endsWith(".class")) {
                // resources or directories: copied through byte-for-byte
                zout.putNextEntry(new JarEntry(entry));
                ByteStreams.copy(zin, zout);
                zout.closeEntry();
            } else {
                // classes: remapped with the CSV name tables before being written out
                zout.putNextEntry(new JarEntry(entry.getName()));
                zout.write(deobfClass(ByteStreams.toByteArray(zin), methods, fields));
                zout.closeEntry();
            }
        }
    }
}