Example usage for com.amazonaws.services.s3.model S3ObjectSummary getSize

List of usage examples for com.amazonaws.services.s3.model S3ObjectSummary getSize

Introduction

In this page you can find the example usage for com.amazonaws.services.s3.model S3ObjectSummary getSize.

Prototype

public long getSize() 

Source Link

Document

Gets the size of this object in bytes.

Usage

From source file:org.finra.herd.service.helper.StorageFileHelper.java

License:Apache License

/**
 * Validates registered S3 files per list of expected storage files. The validation ignores (does not fail) when detecting unregistered zero byte S3 files.
 *
 * @param expectedStorageFiles the list of expected S3 files represented by storage files
 * @param s3ObjectSummaries the list of actual S3 files represented by S3 object summaries
 * @param storageName the storage name/*from   w  ww. ja v  a2s .  com*/
 * @param businessObjectDataKey the business object data key
 */
public void validateRegisteredS3Files(List<StorageFile> expectedStorageFiles,
        List<S3ObjectSummary> s3ObjectSummaries, String storageName,
        BusinessObjectDataKey businessObjectDataKey) {
    // Get a set of actual S3 file paths.
    Set<String> actualS3FilePaths = new HashSet<>(getFilePathsFromS3ObjectSummaries(s3ObjectSummaries));

    // Validate existence and file size for all expected files.
    for (StorageFile expectedStorageFile : expectedStorageFiles) {
        if (!actualS3FilePaths.contains(expectedStorageFile.getFilePath())) {
            throw new ObjectNotFoundException(
                    String.format("Registered file \"%s\" does not exist in \"%s\" storage.",
                            expectedStorageFile.getFilePath(), storageName));
        }
    }

    // Get a set of expected file paths.
    Set<String> expectedFilePaths = new HashSet<>(getFilePathsFromStorageFiles(expectedStorageFiles));

    // Create a JSON representation of the business object data key.
    String businessObjectDataKeyAsJson = jsonHelper.objectToJson(businessObjectDataKey);

    // Validate that no other files in S3 bucket except for expected storage files have the same S3 key prefix.
    // Please note that this validation ignores (does not fail on) any unregistered zero byte S3 files.
    for (S3ObjectSummary s3ObjectSummary : s3ObjectSummaries) {
        if (!expectedFilePaths.contains(s3ObjectSummary.getKey())) {
            // Ignore unregistered zero byte S3 files.
            if (s3ObjectSummary.getSize() == 0) {
                LOGGER.info(
                        "Ignoring unregistered zero byte S3 file. s3Key=\"{}\" storageName=\"{}\" businessObjectDataKey={}",
                        s3ObjectSummary.getKey(), storageName, businessObjectDataKeyAsJson);
            } else {
                throw new IllegalStateException(String.format(
                        "Found unregistered non-empty S3 file \"%s\" in \"%s\" storage. Business object data {%s}",
                        s3ObjectSummary.getKey(), storageName,
                        businessObjectDataHelper.businessObjectDataKeyToString(businessObjectDataKey)));
            }
        }
    }
}

From source file:org.icgc.dcc.storage.server.repository.s3.S3ListingService.java

License:Open Source License

/** Builds an {@code ObjectInfo} from an S3 summary: object id, last-modified millis, and size. */
private ObjectInfo createInfo(S3ObjectSummary objectSummary) {
    long lastModifiedMillis = objectSummary.getLastModified().getTime();
    return new ObjectInfo(getObjectId(objectSummary), lastModifiedMillis, objectSummary.getSize());
}

From source file:org.openflamingo.fs.s3.S3ObjectProvider.java

License:Apache License

/**
 * Lists the files (not folders) directly under the given path, following S3 listing
 * pagination. Returns an empty list when the path resolves to an empty object key.
 *
 * @param path the path whose files are listed; "/" means no bucket is resolved
 * @return the files found under the path
 * @throws FileSystemException if the S3 listing fails for any reason
 */
public List<FileInfo> getFiles(String path) {
    String bucket = null;
    if (!"/".equals(path)) {
        bucket = S3Utils.getBucket(path + "/");
    }

    String relativePath = S3Utils.getObjectKey(path);

    List<FileInfo> filesList = new ArrayList<FileInfo>();
    if ("".equals(relativePath)) {
        return filesList;
    }

    try {
        ListObjectsRequest request = new ListObjectsRequest().withBucketName(bucket).withPrefix(relativePath)
                .withDelimiter("/");
        ObjectListing objectListing = awsClient.listObjects(request);

        while (true) {
            for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
                String objectKey = objectSummary.getKey();
                // Keys ending with the delimiter represent folders, not files.
                if (objectKey.endsWith("/")) {
                    continue;
                }
                filesList.add(new S3ObjectInfo(objectSummary.getBucketName(), objectKey,
                        FileUtils.getFilename(objectKey), objectSummary.getLastModified().getTime(),
                        objectSummary.getSize()));
            }
            if (!objectListing.isTruncated()) {
                break;
            }
            objectListing = awsClient.listNextBatchOfObjects(objectListing);
        }

        return filesList;
    } catch (Exception e) {
        throw new FileSystemException("An error has occurred.", e);
    }
}

From source file:org.p365.S3Sample.java

License:Open Source License

/**
 * Walks through basic Amazon S3 operations: create a bucket, list buckets, upload an
 * object, download it, and list objects by prefix (delete steps are commented out).
 *
 * <p>NOTE(review): the bucket name, key, and upload path below are hard-coded sample
 * values — adjust them before running.
 *
 * @param args unused
 * @throws IOException if reading the downloaded object's content stream fails
 */
public static void main(String[] args) throws IOException {
    /*
     * This credentials provider implementation loads your AWS credentials
     * from a properties file at the root of your classpath.
     *
     * Important: Be sure to fill in your AWS access credentials in the
     *            AwsCredentials.properties file before you try to run this
     *            sample.
     * http://aws.amazon.com/security-credentials
     */
    AmazonS3 s3 = new AmazonS3Client(new ClasspathPropertiesFileCredentialsProvider());
    Region usWest2 = Region.getRegion(Regions.US_WEST_2);
    s3.setRegion(usWest2);

    // Hard-coded sample bucket and object key (see NOTE above).
    String bucketName = "mynewbuket";
    String key = "Myobj/sd.jpg";

    System.out.println("===========================================");
    System.out.println("Getting Started with Amazon S3");
    System.out.println("===========================================\n");

    try {
        /*
         * Create a new S3 bucket - Amazon S3 bucket names are globally unique,
         * so once a bucket name has been taken by any user, you can't create
         * another bucket with that same name.
         *
         * You can optionally specify a location for your bucket if you want to
         * keep your data closer to your applications or users.
         */
        System.out.println("Creating bucket " + bucketName + "\n");
        if (!s3.doesBucketExist(bucketName)) {
            s3.createBucket(bucketName);
        }

        /*
         * List the buckets in your account
         */
        System.out.println("Listing buckets");
        for (Bucket bucket : s3.listBuckets()) {
            System.out.println(" - " + bucket.getName());
        }
        System.out.println();

        /*
         * Upload an object to your bucket - You can easily upload a file to
         * S3, or upload directly an InputStream if you know the length of
         * the data in the stream. You can also specify your own metadata
         * when uploading to S3, which allows you set a variety of options
         * like content-type and content-encoding, plus additional metadata
         * specific to your applications.
         */
        System.out.println("Uploading a new object to S3 from a file\n");
        // NOTE(review): machine-specific absolute path — will fail on other hosts.
        String pathname = "D:\\Program Files\\apache-tomcat-7.0.42\\webapps\\WorkerForP365\\src\\AAA_1465.jpg";
        File file = new File(pathname);
        s3.putObject(
                new PutObjectRequest(bucketName, key, file).withCannedAcl(CannedAccessControlList.PublicRead));

        /*
         * Download an object - When you download an object, you get all of
         * the object's metadata and a stream from which to read the contents.
         * It's important to read the contents of the stream as quickly as
         * possibly since the data is streamed directly from Amazon S3 and your
         * network connection will remain open until you read all the data or
         * close the input stream.
         *
         * GetObjectRequest also supports several other options, including
         * conditional downloading of objects based on modification times,
         * ETags, and selectively downloading a range of an object.
         */
        System.out.println("Downloading an object");
        S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
        System.out.println("Content-Type: " + object.getObjectMetadata().getContentType());
        displayTextInputStream(object.getObjectContent());

        /*
         * List objects in your bucket by prefix - There are many options for
         * listing the objects in your bucket.  Keep in mind that buckets with
         * many objects might truncate their results when listing their objects,
         * so be sure to check if the returned object listing is truncated, and
         * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve
         * additional results.
         */
        System.out.println("Listing objects");
        ObjectListing objectListing = s3
                .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix("My"));
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            System.out.println(
                    " - " + objectSummary.getKey() + "  " + "(size = " + objectSummary.getSize() + ")");
        }
        System.out.println();

        /*
         * Delete an object - Unless versioning has been turned on for your bucket,
         * there is no way to undelete an object, so use caution when deleting objects.
         */
        //System.out.println("Deleting an object\n");
        //s3.deleteObject(bucketName, key);

        /*
         * Delete a bucket - A bucket must be completely empty before it can be
         * deleted, so remember to delete any objects from your buckets before
         * you try to delete them.
         */
        //System.out.println("Deleting bucket " + bucketName + "\n");
        //s3.deleteBucket(bucketName);
    } catch (AmazonServiceException ase) {
        // The request reached S3 but was rejected with an error response.
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        // The client failed before or without reaching S3 (e.g. network trouble).
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}

From source file:org.serginho.awss3conn.Connection.java

/**
 * Returns all non-empty objects under this connection's bucket and key, following
 * listing pagination so that no batch of results is skipped.
 *
 * <p>Zero-size summaries are excluded (e.g. the root "key" folder placeholder object).
 *
 * @return every listed object summary whose size is non-zero
 */
public List<S3ObjectSummary> getFileList() {
    AmazonS3 s3Client = getS3Client();

    ObjectListing objects = s3Client.listObjects(this.amazonBucket, this.key);

    List<S3ObjectSummary> fileList = new ArrayList<S3ObjectSummary>();

    while (true) {
        for (S3ObjectSummary objectSummary : objects.getObjectSummaries()) {
            // We don't need to display the root "key" (folder), which is zero bytes.
            if (objectSummary.getSize() != 0) {
                fileList.add(objectSummary);
            }
        }
        // Bug fix: the previous do/while fetched the next batch *before* testing
        // isTruncated(), which silently dropped the final batch of results whenever
        // the listing was truncated, and issued a needless extra list request when
        // the first listing was already complete. Test truncation first instead.
        if (!objects.isTruncated()) {
            break;
        }
        objects = s3Client.listNextBatchOfObjects(objects);
    }

    return fileList;
}

From source file:org.springframework.integration.aws.s3.core.AmazonS3OperationsImpl.java

License:Apache License

/**
 * Lists objects from the given bucket and folder as one page of results.
 *
 * @param bucketName the bucket to list; must contain non-whitespace text
 * @param folder the folder to list, or null/"/" to list from the bucket root
 * @param nextMarker the marker returned by the previous page, or null for the first page
 * @param pageSize the maximum number of keys to fetch; applied only when positive
 * @return a paginated view over the listed objects, or null when nothing was listed
 */
public PaginatedObjectsView listObjects(String bucketName, String folder, String nextMarker, int pageSize) {
    if (logger.isDebugEnabled()) {
        logger.debug("Listing objects from bucket " + bucketName + " and folder " + folder);
        logger.debug("Next marker is " + nextMarker + " and pageSize is " + pageSize);
    }

    // Bug fix: Assert.notNull(StringUtils.hasText(bucketName), ...) could never fail,
    // because the autoboxed Boolean result of hasText() is never null. Assert that the
    // result is true instead, so null/blank bucket names are actually rejected.
    Assert.isTrue(StringUtils.hasText(bucketName), "Bucket name should be non null and non empty");

    // "/" (or null) means the bucket root: list without a prefix.
    String prefix = null;
    if (folder != null && !"/".equals(folder)) {
        prefix = folder;
    }
    ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(bucketName)
            .withPrefix(prefix).withMarker(nextMarker);

    if (pageSize > 0) {
        listObjectsRequest.withMaxKeys(pageSize);
    }

    ObjectListing listing = client.listObjects(listObjectsRequest);
    PaginatedObjectsView view = null;
    List<com.amazonaws.services.s3.model.S3ObjectSummary> summaries = listing.getObjectSummaries();
    if (summaries != null && !summaries.isEmpty()) {
        List<S3ObjectSummary> objectSummaries = new ArrayList<S3ObjectSummary>();
        for (final com.amazonaws.services.s3.model.S3ObjectSummary summary : summaries) {
            // Adapt each AWS SDK summary to this module's own S3ObjectSummary abstraction.
            S3ObjectSummary summ = new S3ObjectSummary() {

                public long getSize() {
                    return summary.getSize();
                }

                public Date getLastModified() {
                    return summary.getLastModified();
                }

                public String getKey() {
                    return summary.getKey();
                }

                public String getETag() {
                    return summary.getETag();
                }

                public String getBucketName() {
                    return summary.getBucketName();
                }
            };
            objectSummaries.add(summ);
        }
        view = new PagninatedObjectsViewImpl(objectSummaries, listing.getNextMarker());
    }
    return view;
}

From source file:org.springframework.integration.aws.s3.core.DefaultAmazonS3Operations.java

License:Apache License

/**
 * The implementation that uses the AWS SDK to list objects from the given bucket
 *
 * @param bucketName The bucket in which we want to list the objects in
 * @param nextMarker The number of objects can be very large and this serves as the marker
 *                 for remembering the last record fetch in the last retrieve operation.
 * @param pageSize The max number of records to be retrieved in one list object operation.
 * @param prefix The prefix for the list operation, this can serve as the folder whose contents
 *              are to be listed./*from   ww w.j a  va 2s  .co  m*/
 */
@Override
protected PaginatedObjectsView doListObjects(String bucketName, String nextMarker, int pageSize,
        String prefix) {

    ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(bucketName)
            .withPrefix(prefix).withMarker(nextMarker);

    if (pageSize > 0) {
        listObjectsRequest.withMaxKeys(pageSize);
    }

    ObjectListing listing = client.listObjects(listObjectsRequest);
    PaginatedObjectsView view = null;
    List<com.amazonaws.services.s3.model.S3ObjectSummary> summaries = listing.getObjectSummaries();
    if (summaries != null && !summaries.isEmpty()) {
        List<S3ObjectSummary> objectSummaries = new ArrayList<S3ObjectSummary>();
        for (final com.amazonaws.services.s3.model.S3ObjectSummary summary : summaries) {
            S3ObjectSummary summ = new S3ObjectSummary() {

                public long getSize() {
                    return summary.getSize();
                }

                public Date getLastModified() {
                    return summary.getLastModified();
                }

                public String getKey() {
                    return summary.getKey();
                }

                public String getETag() {
                    return summary.getETag();
                }

                public String getBucketName() {
                    return summary.getBucketName();
                }
            };
            objectSummaries.add(summ);
        }
        view = new PagninatedObjectsViewImpl(objectSummaries, listing.getNextMarker());
    }
    return view;
}

From source file:org.symphonyoss.vb.mail.MailReader.java

License:Apache License

/**
 * Reads incoming mail files from S3, converts each one to a {@code SymMessage}, and
 * moves processed files to the configured "processed" prefix.
 *
 * <p>NOTE(review): the {@code bucket} parameter is never used, and {@code prefix} is only
 * compared against keys to skip the prefix placeholder object — the bucket and incoming
 * prefix actually listed come from the {@code BotConfig} system properties. Confirm
 * whether the parameters should be honored instead.
 *
 * @param bucket the bucket to read from (currently ignored — see note above)
 * @param prefix the S3 key of the prefix placeholder object to skip
 * @return the messages successfully converted from mail files
 */
public List<SymMessage> getMessages(String bucket, String prefix) {

    AwsS3Client awsS3Client = new AwsS3Client();

    List<SymMessage> messages = new ArrayList<>();

    // List everything under the configured incoming-mail prefix.
    List<S3ObjectSummary> objectSummaries = awsS3Client.getAllObjects(System.getProperty(BotConfig.S3_BUCKET),
            System.getProperty(BotConfig.MAIL_S3_PREFIX_INCOMING));

    for (S3ObjectSummary objectSummary : objectSummaries) {

        // Skip the prefix placeholder object itself.
        if (objectSummary.getKey().equals(prefix))
            continue;

        logger.info("New mail file: {}:{}:{}", objectSummary.getKey(), objectSummary.getSize(),
                objectSummary.getLastModified());

        try {

            Message message = Mailer.getMessage(awsS3Client.getObject(objectSummary));

            //Couldn't convert it.
            if (message == null)
                continue;

            try {

                SymMessage symMessage = getSymMessage(message);

                if (symMessage != null) {

                    logger.info("New mail message: from: {}, subject: {}, body: {}",
                            symMessage.getSymUser().getEmailAddress(), message.getSubject(),
                            symMessage.getMessage());
                    messages.add(symMessage);
                }

            } catch (SymException ex) {

                // Conversion failures are logged but do not stop processing other files.
                logger.error("Could not convert email to SymMessage from file [{}]", objectSummary.getKey(),
                        ex);

            }

            // Destination key keeps only the file name (text after the last "/").
            String destKey = objectSummary.getKey().substring(objectSummary.getKey().lastIndexOf("/") + 1);

            //logger.info("DEST FILE: {}", destKey);

            awsS3Client.moveObject(objectSummary, System.getProperty(BotConfig.S3_BUCKET),
                    System.getProperty(BotConfig.MAIL_S3_PREFIX_PROCESSED) + destKey);

        } catch (Exception e) {
            // Best-effort per file: log and continue with the next mail file.
            logger.error("Failed to process incoming email [{}]", objectSummary.getKey(), e);
        }

    }

    return messages;
}

From source file:org.zalando.stups.fullstop.plugin.SaveSecurityGroupsPlugin.java

License:Apache License

/**
 * Prints every object under the given prefix (using "/" as delimiter) and collects the
 * common prefixes ("sub-folders") discovered while paging through the listing.
 *
 * @param bucketName the bucket to list
 * @param prefix the key prefix to list under
 * @return the common prefixes reported by S3 for the prefix/delimiter combination
 */
private List<String> listS3Objects(String bucketName, String prefix) {
    final List<String> commonPrefixes = Lists.newArrayList();

    AmazonS3Client amazonS3 = new AmazonS3Client();

    try {
        System.out.println("Listing objects");

        ListObjectsRequest request = new ListObjectsRequest().withDelimiter("/").withBucketName(bucketName)
                .withPrefix(prefix);

        ObjectListing listing;
        do {
            listing = amazonS3.listObjects(request);
            commonPrefixes.addAll(listing.getCommonPrefixes());
            for (S3ObjectSummary summary : listing.getObjectSummaries()) {
                System.out.println(
                        " - " + summary.getKey() + "  " + "(size = " + summary.getSize() + ")");
            }
            // Continue the next request from where this page ended.
            request.setMarker(listing.getNextMarker());
        } while (listing.isTruncated());

    } catch (AmazonServiceException ase) {
        // The request reached S3 but was rejected with an error response.
        System.out.println("Caught an AmazonServiceException, " + "which means your request made it "
                + "to Amazon S3, but was rejected with an error response " + "for some reason.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        // The client failed before or without reaching S3 (e.g. network trouble).
        System.out.println("Caught an AmazonClientException, " + "which means the client encountered "
                + "an internal error while trying to communicate" + " with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }

    return commonPrefixes;
}

From source file:pagerank.S3Wrapper.java

License:Open Source License

/**
 * Demonstrates listing buckets and objects from a fixed bucket, then downloading one
 * object (the create/upload/delete steps are commented out).
 *
 * <p>NOTE(review): the bucket name and the downloaded object key are hard-coded sample
 * values, and credentials are read from the local "default" AWS profile.
 *
 * @param args unused
 * @throws IOException declared for stream handling of the downloaded object
 */
public static void main(String[] args) throws IOException {

    /*
     * The ProfileCredentialsProvider will return your [default]
     * credential profile by reading from the credentials file located at
     * (/home/yupenglu/.aws/credentials).
     */
    AWSCredentials credentials = null;
    try {
        credentials = new ProfileCredentialsProvider("default").getCredentials();
    } catch (Exception e) {
        throw new AmazonClientException("Cannot load the credentials from the credential profiles file. "
                + "Please make sure that your credentials file is at the correct "
                + "location (/home/yupenglu/.aws/credentials), and is in valid format.", e);
    }

    AmazonS3 s3 = new AmazonS3Client(credentials);
    //        Region usWest2 = Region.getRegion(Regions.US_WEST_2);
    //        s3.setRegion(usWest2);

    //        String bucketName = "my-first-s3-bucket-" + UUID.randomUUID();
    // Hard-coded sample bucket and key (see NOTE above).
    String bucketName = "pages4.27";
    String key = "NewKey";

    System.out.println("===========================================");
    System.out.println("Getting Started with Amazon S3");
    System.out.println("===========================================\n");

    try {
        /*
         * Create a new S3 bucket - Amazon S3 bucket names are globally unique,
         * so once a bucket name has been taken by any user, you can't create
         * another bucket with that same name.
         *
         * You can optionally specify a location for your bucket if you want to
         * keep your data closer to your applications or users.
         */
        //            System.out.println("Creating bucket " + bucketName + "\n");
        //            s3.createBucket(bucketName);

        /*
         * List the buckets in your account
         */
        System.out.println("Listing buckets");
        for (Bucket bucket : s3.listBuckets()) {
            System.out.println(" - " + bucket.getName());
        }
        System.out.println();

        /*
         * Upload an object to your bucket - You can easily upload a file to
         * S3, or upload directly an InputStream if you know the length of
         * the data in the stream. You can also specify your own metadata
         * when uploading to S3, which allows you set a variety of options
         * like content-type and content-encoding, plus additional metadata
         * specific to your applications.
         */
        //            System.out.println("Uploading a new object to S3 from a file\n");
        //            s3.putObject(new PutObjectRequest(bucketName, key, createSampleFile()));

        /*
         * Download an object - When you download an object, you get all of
         * the object's metadata and a stream from which to read the contents.
         * It's important to read the contents of the stream as quickly as
         * possibly since the data is streamed directly from Amazon S3 and your
         * network connection will remain open until you read all the data or
         * close the input stream.
         *
         * GetObjectRequest also supports several other options, including
         * conditional downloading of objects based on modification times,
         * ETags, and selectively downloading a range of an object.
         */
        //            System.out.println("Downloading an object");
        //            S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
        //            System.out.println("Content-Type: "  + object.getObjectMetadata().getContentType());
        //            displayTextInputStream(object.getObjectContent());

        /*
         * List objects in your bucket by prefix - There are many options for
         * listing the objects in your bucket.  Keep in mind that buckets with
         * many objects might truncate their results when listing their objects,
         * so be sure to check if the returned object listing is truncated, and
         * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve
         * additional results.
         */
        System.out.println("Listing objects");
        //            ObjectListing objectListing = s3.listObjects(new ListObjectsRequest()
        //                    .withBucketName(bucketName)
        //                    .withPrefix("My"));
        ObjectListing objectListing = s3.listObjects(new ListObjectsRequest().withBucketName(bucketName));
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            // Keys were stored URL-encoded; decode them for display.
            System.out.println(" - " + URLDecoder.decode(objectSummary.getKey(), "UTF-8") + "  " + "(size = "
                    + objectSummary.getSize() + ")");
        }
        // Fetch one known object whose key is a URL-encoded URL.
        S3Object testObj = s3.getObject(bucketName,
                URLEncoder.encode("http://finance.yahoo.com/investing-news/", "UTF-8"));
        S3ObjectInputStream inputStream = testObj.getObjectContent();

        //            System.out.println(streamToString(inputStream));
        System.out.println();

        /*
         * Delete an object - Unless versioning has been turned on for your bucket,
         * there is no way to undelete an object, so use caution when deleting objects.
         */
        //            System.out.println("Deleting an object\n");
        //            s3.deleteObject(bucketName, key);

        /*
         * Delete a bucket - A bucket must be completely empty before it can be
         * deleted, so remember to delete any objects from your buckets before
         * you try to delete them.
         */
        //            System.out.println("Deleting bucket " + bucketName + "\n");
        //            s3.deleteBucket(bucketName);
    } catch (AmazonServiceException ase) {
        // The request reached S3 but was rejected with an error response.
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        // The client failed before or without reaching S3 (e.g. network trouble).
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}