Example usage for com.amazonaws.services.s3.model S3Object getObjectContent

List of usage examples for com.amazonaws.services.s3.model S3Object getObjectContent

Introduction

On this page you can find example usage of com.amazonaws.services.s3.model S3Object getObjectContent.

Prototype

public S3ObjectInputStream getObjectContent() 

Source Link

Document

Gets the input stream containing the contents of this object.
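Before the full examples below, here is a minimal, self-contained sketch of typical usage. The bucket name and key are placeholders, and the client is built with AmazonS3ClientBuilder, which assumes a reasonably recent 1.x SDK. The returned S3ObjectInputStream streams its data directly from Amazon S3, so it should be read promptly and closed when done, for example with try-with-resources.

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectInputStream;

import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class GetObjectContentSketch {
    public static void main(String[] args) throws IOException {
        // Placeholder bucket and key; replace with real values
        String bucketName = "example-bucket";
        String key = "example-key";

        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        S3Object object = s3.getObject(bucketName, key);

        // getObjectContent() returns the raw object data as a stream;
        // closing it releases the underlying HTTP connection.
        try (S3ObjectInputStream content = object.getObjectContent()) {
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            byte[] buffer = new byte[8192];
            int read;
            while ((read = content.read(buffer)) != -1) {
                out.write(buffer, 0, read);
            }
            System.out.println("Downloaded " + out.size() + " bytes from s3://" + bucketName + "/" + key);
        }
    }
}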

Usage

From source file:org.systemsbiology.athero.SimpleStoreActivitiesS3Impl.java

License:Open Source License

/**
 * @param bucketName
 *          Name of S3 bucket
 * @param remoteName
 *          Key of the S3 object to download
 * @param localName
 *          Name of the file to write locally
 * @return
 *      The name of the host the file was downloaded to
 * @throws IOException 
 */

private String downloadFileFromS3(String bucketName, String remoteName, String localName) throws IOException {
    System.out.println("downloadFileFromS3 begin remoteName=" + remoteName + ", localName=" + localName);
    AmazonS3 storage = getS3Client();
    try {
        FileOutputStream f = new FileOutputStream(localName);
        try {
            S3Object obj = storage.getObject(bucketName, remoteName);
            InputStream inputStream = obj.getObjectContent();
            long totalSize = obj.getObjectMetadata().getContentLength();

            try {
                long totalRead = 0;
                int read = 0;
                byte[] bytes = new byte[1024];
                long lastHeartbeatTime = System.currentTimeMillis();
                while ((read = inputStream.read(bytes)) != -1) {
                    totalRead += read;
                    f.write(bytes, 0, read);
                    // Multiply before dividing so integer division does not truncate the progress to 0
                    int progress = (int) (totalRead * 100 / totalSize);
                    lastHeartbeatTime = heartbeat(lastHeartbeatTime, progress);
                }
            } finally {
                inputStream.close();
            }
        } finally {
            f.close();
        }
    } catch (AmazonClientException | IOException e) {
        // AmazonServiceException is a subclass of AmazonClientException, so it is covered here as well
        System.out.println("Failure downloading from S3");
        throw e;
    }
    System.out.println("downloadFileFromS3 done");
    // TODO: remove after testing
    // Return the name of the host the file was downloaded to (hostSpecificTaskList is a field of the enclosing class)
    return hostSpecificTaskList;
}

From source file:org.tiogasolutions.lib.spring.amazon.S3ResourceLoader.java

License:Apache License

@Override
public Resource getResource(String location) {
    try {
        S3Path s3Path = parseS3Path(location);
        S3Object s3Object = client.getObject(s3Path.bucket, s3Path.key);

        // Close the object content stream once the bytes are read so the HTTP connection is released
        try (InputStream inputStream = s3Object.getObjectContent()) {
            byte[] bytes = IoUtils.toBytes(inputStream);
            return new ByteArrayResource(bytes, location);
        }

    } catch (Exception e) {
        throw new S3ResourceException("could not load resource from " + location, e);
    }
}

From source file:org.weakref.s3fs.util.AmazonS3ClientMock.java

License:Apache License

@Override
public CopyObjectResult copyObject(String sourceBucketName, String sourceKey, String destinationBucketName,
        String destinationKey) throws AmazonClientException, AmazonServiceException {

    S3Element element = find(sourceBucketName, sourceKey);

    if (element != null) {

        S3Object objectSource = element.getS3Object();
        // copy the object, including its content, metadata, and redirect location
        S3Object resObj = new S3Object();
        resObj.setBucketName(destinationBucketName);
        resObj.setKey(destinationKey);
        resObj.setObjectContent(objectSource.getObjectContent());
        resObj.setObjectMetadata(objectSource.getObjectMetadata());
        resObj.setRedirectLocation(objectSource.getRedirectLocation());
        // copy permission
        AccessControlList permission = new AccessControlList();
        permission.setOwner(element.getPermission().getOwner());
        permission.grantAllPermissions(element.getPermission().getGrants().toArray(new Grant[0]));
        // TODO: the destination key may not exist yet
        objects.get(find(destinationBucketName))
                .add(new S3Element(resObj, permission, sourceKey.endsWith("/")));

        return new CopyObjectResult();
    }

    throw new AmazonServiceException("object source not found");
}

From source file:org.xwiki.blobstore.s3.internal.S3BlobStore.java

License:Open Source License

@Override
public InputStream getBlob(String path) {
    String normalizedPath = normalizePath(path);

    this.logger.debug("Getting blob '{}' from bucket '{}'", normalizedPath, this.bucket);

    S3Object object = this.client.getObject(this.bucket, normalizedPath);
    if (object != null) {
        return object.getObjectContent();
    }

    return null;
}

From source file:org.zalando.stups.fullstop.controller.S3Controller.java

License:Apache License

@RequestMapping(method = RequestMethod.GET, value = "/download")
public void downloadFiles(@RequestParam(value = "bucket") final String bucket,
        @RequestParam(value = "location") final String location, @RequestParam(value = "page") final int page) {

    try {
        log.info("Creating fullstop directory here: {}", fullstopLoggingDir);

        boolean mkdirs = new File(fullstopLoggingDir).mkdirs();
    } catch (SecurityException e) {
        // do nothing
    }

    AmazonS3Client amazonS3Client = new AmazonS3Client();
    amazonS3Client.setRegion(Region.getRegion(Regions
            .fromName((String) cloudTrailProcessingLibraryProperties.getAsProperties().get(S3_REGION_KEY))));

    ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(bucket) //
            .withPrefix(location) //
            .withMaxKeys(page);

    ObjectListing objectListing = amazonS3Client.listObjects(listObjectsRequest);

    final List<S3ObjectSummary> s3ObjectSummaries = objectListing.getObjectSummaries();

    while (objectListing.isTruncated()) {

        objectListing = amazonS3Client.listNextBatchOfObjects(objectListing);
        s3ObjectSummaries.addAll(objectListing.getObjectSummaries());

    }

    for (S3ObjectSummary s3ObjectSummary : s3ObjectSummaries) {
        String bucketName = s3ObjectSummary.getBucketName();
        String key = s3ObjectSummary.getKey();

        S3Object object = amazonS3Client.getObject(new GetObjectRequest(bucketName, key));
        InputStream inputStream = object.getObjectContent();

        File file = new File(fullstopLoggingDir,
                object.getBucketName() + object.getObjectMetadata().getETag() + JSON_GZ);

        copyInputStreamToFile(inputStream, file);
        log.info("File saved here: {}", file.getAbsolutePath());

    }
}

From source file:oulib.aws.s3.S3Util.java

/**
 * Generate a small tiff file from a large Tiff S3 bucket object <br>
 * Note: the small tiff file will have the same key path as the original one
 *
 * @param s3client : S3 client
 * @param s3 : S3 object that contains the source Tiff
 * @param targetBucketName : the bucket that stores the small tiff file
 * @param targetKey : key of the object in the target bucket
 * @param compressionRate : compression rate
 * @return : PutObjectResult
 */
public static PutObjectResult generateSmallTiff(AmazonS3 s3client, S3Object s3, String targetBucketName,
        String targetKey, double compressionRate) {

    PutObjectResult result = null;
    ByteArrayOutputStream bos = null;
    ByteArrayOutputStream os = null;
    ByteArrayInputStream is = null;
    S3ObjectInputStream s = null;
    ByteArrayInputStream byteInputStream = null;

    try {
        System.setProperty("com.sun.media.jai.disableMediaLib", "true");

        bos = new ByteArrayOutputStream();
        s = s3.getObjectContent();
        byte[] bytes = IOUtils.toByteArray(s);
        byteInputStream = new ByteArrayInputStream(bytes);

        TIFFDecodeParam param = new TIFFDecodeParam();
        ImageDecoder dec = ImageCodec.createImageDecoder("TIFF", byteInputStream, param);

        RenderedImage image = dec.decodeAsRenderedImage();

        RenderingHints qualityHints = new RenderingHints(RenderingHints.KEY_RENDERING,
                RenderingHints.VALUE_RENDER_QUALITY);

        RenderedOp resizedImage = JAI.create("SubsampleAverage", image, compressionRate, compressionRate,
                qualityHints);

        TIFFEncodeParam params = new com.sun.media.jai.codec.TIFFEncodeParam();

        resizedImage = JAI.create("encode", resizedImage, bos, "TIFF", params);

        BufferedImage imagenew = resizedImage.getSourceImage(0).getAsBufferedImage();

        os = new ByteArrayOutputStream();
        ImageIO.write(imagenew, "tif", os);
        is = new ByteArrayInputStream(os.toByteArray());

        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(os.toByteArray().length);
        metadata.setContentType("image/tiff");
        metadata.setLastModified(new Date());

        os.close();

        imagenew.flush();

        result = s3client.putObject(new PutObjectRequest(targetBucketName, targetKey, is, metadata));
    } catch (IOException | AmazonClientException ex) {
        Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        try {
            if (bos != null) {
                bos.close();
            }
            if (os != null) {
                os.close();
            }
            if (is != null) {
                is.close();
            }
            if (s != null) {
                s.close();
            }
            if (byteInputStream != null) {
                byteInputStream.close();
            }
        } catch (IOException ex) {
            Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    return result;
}

From source file:oulib.aws.s3.S3Util.java

/**
 * Pull the Tiff metadata out of the input S3 object and inject it into the
 * content of the target S3 object;<br>
 * generate a new output S3 object that carries the metadata from the input object.
 *
 * @param s3client : S3 client
 * @param obj1 : input object that provides metadata
 * @param obj2 : target object that receives metadata
 * 
 * @return PutObjectResult
 */
public static PutObjectResult copyS3ObjectTiffMetadata(AmazonS3 s3client, S3Object obj1, S3Object obj2) {

    PutObjectResult result = null;

    BufferedInputStream bufferedInputStream = null;
    ByteArrayOutputStream byteArrayOutputStream = null;
    ByteArrayInputStream byteArrayInputStream = null;
    ByteArrayInputStream bis = null;
    S3ObjectInputStream content1 = null;
    S3ObjectInputStream content2 = null;
    String targetBucketName = obj2.getBucketName();
    String outputKey = obj2.getKey().split(".tif")[0] + "-copied.tif";

    ImageMetadata metadata1, metadata2;
    TiffImageMetadata tiffMetadata1, tiffMetadata2;
    TiffOutputSet output1, output2;

    try {
        content1 = obj1.getObjectContent();
        content2 = obj2.getObjectContent();

        byte[] bytes1 = IOUtils.toByteArray(content1);
        byte[] bytes2 = IOUtils.toByteArray(content2);

        metadata1 = Imaging.getMetadata(bytes1);
        metadata2 = Imaging.getMetadata(bytes2);

        tiffMetadata1 = (TiffImageMetadata) metadata1;
        tiffMetadata2 = (TiffImageMetadata) metadata2;

        output1 = tiffMetadata1.getOutputSet();
        output2 = tiffMetadata2.getOutputSet();

        TiffOutputDirectory rootDir = output2.getOrCreateRootDirectory();
        TiffOutputDirectory exifDir = output2.getOrCreateExifDirectory();
        TiffOutputDirectory gpsDir = output2.getOrCreateGPSDirectory();

        if (null != output1.getRootDirectory()) {
            List<TiffOutputField> fs = output1.getRootDirectory().getFields();
            for (TiffOutputField f1 : fs) {
                if (null == rootDir.findField(f1.tag)
                        // CANNOT create the output image with this tag included!
                        && !"PlanarConfiguration".equals(f1.tagInfo.name)) {
                    rootDir.add(f1);
                }
            }
        }

        if (null != output1.getExifDirectory()) {
            for (TiffOutputField f2 : output1.getExifDirectory().getFields()) {
                exifDir.removeField(f2.tagInfo);
                exifDir.add(f2);
            }
        }

        if (null != output1.getGPSDirectory()) {
            for (TiffOutputField f3 : output1.getGPSDirectory().getFields()) {
                gpsDir.removeField(f3.tagInfo);
                gpsDir.add(f3);
            }
        }

        byteArrayOutputStream = new ByteArrayOutputStream();
        TiffImageWriterLossy writerLossy = new TiffImageWriterLossy(output2.byteOrder);
        writerLossy.write(byteArrayOutputStream, output2);

        byteArrayInputStream = new ByteArrayInputStream(byteArrayOutputStream.toByteArray());

        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(byteArrayOutputStream.toByteArray().length);
        metadata.setContentType("image/tiff");
        metadata.setLastModified(new Date());

        result = s3client
                .putObject(new PutObjectRequest(targetBucketName, outputKey, byteArrayInputStream, metadata));

    } catch (ImageReadException | IOException | ImageWriteException ex) {
        Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        try {
            if (null != content1) {
                content1.close();
            }
            if (null != content2) {
                content2.close();
            }
            if (null != bufferedInputStream) {
                bufferedInputStream.close();
            }
            if (null != byteArrayInputStream) {
                byteArrayInputStream.close();
            }
            if (null != byteArrayOutputStream) {
                byteArrayOutputStream.close();
            }
            if (null != bis) {
                bis.close();
            }
        } catch (IOException ex) {
            Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    return result;
}

From source file:oulib.aws.s3.S3Util.java

/**
 *  Get exif technical metadata from S3 object
 *
 * @param s3client
 * @param s3
 * @return : TiffImageMetadata
 */
public static TiffImageMetadata retrieveExifMetadata(AmazonS3 s3client, S3Object s3) {
    TiffImageMetadata tiffMetadata = null;
    // Use try-with-resources so the object content stream is closed when done
    try (S3ObjectInputStream is = s3.getObjectContent()) {
        final ImageMetadata metadata = Imaging.getMetadata(is, s3.getKey());
        tiffMetadata = (TiffImageMetadata) metadata;
    } catch (ImageReadException | IOException ex) {
        Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
    }
    return tiffMetadata;
}

From source file:pagerank.S3Wrapper.java

License:Open Source License

public String get(String key) {
    S3Object testObj = s3.getObject(bucketName, key);
    S3ObjectInputStream inputStream = testObj.getObjectContent();
    return streamToString(inputStream);
}

From source file:pagerank.S3Wrapper.java

License:Open Source License

public static void main(String[] args) throws IOException {

    /*
     * The ProfileCredentialsProvider will return your [default]
     * credential profile by reading from the credentials file located at
     * (/home/yupenglu/.aws/credentials).
     */
    AWSCredentials credentials = null;
    try {
        credentials = new ProfileCredentialsProvider("default").getCredentials();
    } catch (Exception e) {
        throw new AmazonClientException("Cannot load the credentials from the credential profiles file. "
                + "Please make sure that your credentials file is at the correct "
                + "location (/home/yupenglu/.aws/credentials), and is in valid format.", e);
    }

    AmazonS3 s3 = new AmazonS3Client(credentials);
    //        Region usWest2 = Region.getRegion(Regions.US_WEST_2);
    //        s3.setRegion(usWest2);

    //        String bucketName = "my-first-s3-bucket-" + UUID.randomUUID();
    String bucketName = "pages4.27";
    String key = "NewKey";

    System.out.println("===========================================");
    System.out.println("Getting Started with Amazon S3");
    System.out.println("===========================================\n");

    try {
        /*
         * Create a new S3 bucket - Amazon S3 bucket names are globally unique,
         * so once a bucket name has been taken by any user, you can't create
         * another bucket with that same name.
         *
         * You can optionally specify a location for your bucket if you want to
         * keep your data closer to your applications or users.
         */
        //            System.out.println("Creating bucket " + bucketName + "\n");
        //            s3.createBucket(bucketName);

        /*
         * List the buckets in your account
         */
        System.out.println("Listing buckets");
        for (Bucket bucket : s3.listBuckets()) {
            System.out.println(" - " + bucket.getName());
        }
        System.out.println();

        /*
         * Upload an object to your bucket - You can easily upload a file to
         * S3, or upload directly an InputStream if you know the length of
         * the data in the stream. You can also specify your own metadata
         * when uploading to S3, which allows you set a variety of options
         * like content-type and content-encoding, plus additional metadata
         * specific to your applications.
         */
        //            System.out.println("Uploading a new object to S3 from a file\n");
        //            s3.putObject(new PutObjectRequest(bucketName, key, createSampleFile()));

        /*
         * Download an object - When you download an object, you get all of
         * the object's metadata and a stream from which to read the contents.
         * It's important to read the contents of the stream as quickly as
         * possibly since the data is streamed directly from Amazon S3 and your
         * network connection will remain open until you read all the data or
         * close the input stream.
         *
         * GetObjectRequest also supports several other options, including
         * conditional downloading of objects based on modification times,
         * ETags, and selectively downloading a range of an object.
         */
        //            System.out.println("Downloading an object");
        //            S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
        //            System.out.println("Content-Type: "  + object.getObjectMetadata().getContentType());
        //            displayTextInputStream(object.getObjectContent());

        /*
         * List objects in your bucket by prefix - There are many options for
         * listing the objects in your bucket.  Keep in mind that buckets with
         * many objects might truncate their results when listing their objects,
         * so be sure to check if the returned object listing is truncated, and
         * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve
         * additional results.
         */
        System.out.println("Listing objects");
        //            ObjectListing objectListing = s3.listObjects(new ListObjectsRequest()
        //                    .withBucketName(bucketName)
        //                    .withPrefix("My"));
        ObjectListing objectListing = s3.listObjects(new ListObjectsRequest().withBucketName(bucketName));
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            System.out.println(" - " + URLDecoder.decode(objectSummary.getKey(), "UTF-8") + "  " + "(size = "
                    + objectSummary.getSize() + ")");
        }
        S3Object testObj = s3.getObject(bucketName,
                URLEncoder.encode("http://finance.yahoo.com/investing-news/", "UTF-8"));
        S3ObjectInputStream inputStream = testObj.getObjectContent();

        //            System.out.println(streamToString(inputStream));
        System.out.println();

        /*
         * Delete an object - Unless versioning has been turned on for your bucket,
         * there is no way to undelete an object, so use caution when deleting objects.
         */
        //            System.out.println("Deleting an object\n");
        //            s3.deleteObject(bucketName, key);

        /*
         * Delete a bucket - A bucket must be completely empty before it can be
         * deleted, so remember to delete any objects from your buckets before
         * you try to delete them.
         */
        //            System.out.println("Deleting bucket " + bucketName + "\n");
        //            s3.deleteBucket(bucketName);
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}