Example usage for com.amazonaws.services.s3 AmazonS3Client shutdown

Introduction

This page lists example usages of com.amazonaws.services.s3 AmazonS3Client shutdown.

Prototype

public void shutdown() 

Document

Shuts down this client object, releasing any resources that might be held open.
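
The examples below all follow the same pattern: create the client, perform one or more S3 operations, and call shutdown() in a finally block so the client's resources are released even if an operation fails. A minimal sketch of that pattern, assuming default-chain credentials and a hypothetical bucket and key:

import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.ObjectMetadata;

public class ShutdownExample {
    public static void main(String[] args) {
        // Create the client with credentials resolved from the default provider chain.
        AmazonS3Client s3Client = new AmazonS3Client(new DefaultAWSCredentialsProviderChain());

        try {
            // Hypothetical bucket and key, used only for illustration.
            ObjectMetadata objectMetadata = s3Client.getObjectMetadata("example-bucket", "example/key.txt");
            System.out.println("Content length: " + objectMetadata.getContentLength());
        } finally {
            // Shutdown the AmazonS3Client instance to release resources.
            s3Client.shutdown();
        }
    }
}

Calling shutdown() is optional, but doing so explicitly, as every example below does, releases held resources deterministically; newer 1.x SDK releases favor AmazonS3ClientBuilder over the constructor shown in this sketch.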

Usage

From source file: org.finra.herd.dao.impl.S3DaoImpl.java

License: Apache License

@Override
public ObjectMetadata getObjectMetadata(final S3FileTransferRequestParamsDto params) {
    AmazonS3Client s3Client = getAmazonS3(params);

    try {
        return s3Operations.getObjectMetadata(params.getS3BucketName(), params.getS3KeyPrefix(), s3Client);
    } catch (AmazonServiceException e) {
        if (e.getStatusCode() == HttpStatus.SC_NOT_FOUND) {
            return null;
        }

        throw new IllegalStateException(
                String.format("Failed to get S3 metadata for object key \"%s\" from bucket \"%s\". Reason: %s",
                        params.getS3KeyPrefix(), params.getS3BucketName(), e.getMessage()),
                e);
    } finally {
        // Shutdown the AmazonS3Client instance to release resources.
        s3Client.shutdown();
    }
}

From source file: org.finra.herd.dao.impl.S3DaoImpl.java

License: Apache License

@Override
public Properties getProperties(String bucketName, String key,
        S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto) {
    AmazonS3Client s3Client = getAmazonS3(s3FileTransferRequestParamsDto);

    try {
        S3Object s3Object = getS3Object(s3Client, bucketName, key, true);
        return javaPropertiesHelper.getProperties(s3Object.getObjectContent());
    } catch (IllegalArgumentException e) {
        throw new IllegalArgumentException(
                "The properties file in S3 bucket '" + bucketName + "' and key '" + key + "' is invalid.", e);
    } finally {
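        // Shutdown the AmazonS3Client instance to release resources.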
        s3Client.shutdown();
    }
}

From source file: org.finra.herd.dao.impl.S3DaoImpl.java

License: Apache License

@Override
public List<S3ObjectSummary> listDirectory(final S3FileTransferRequestParamsDto params,
        boolean ignoreZeroByteDirectoryMarkers) {
    Assert.isTrue(!isRootKeyPrefix(params.getS3KeyPrefix()),
            "Listing of S3 objects from root directory is not allowed.");

    AmazonS3Client s3Client = getAmazonS3(params);
    List<S3ObjectSummary> s3ObjectSummaries = new ArrayList<>();

    try {
        ListObjectsRequest listObjectsRequest = new ListObjectsRequest()
                .withBucketName(params.getS3BucketName()).withPrefix(params.getS3KeyPrefix());
        ObjectListing objectListing;

        do {
            objectListing = s3Operations.listObjects(listObjectsRequest, s3Client);

            for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
                // Ignore 0 byte objects that represent S3 directories.
                if (!(ignoreZeroByteDirectoryMarkers && objectSummary.getKey().endsWith("/")
                        && objectSummary.getSize() == 0L)) {
                    s3ObjectSummaries.add(objectSummary);
                }
            }

            listObjectsRequest.setMarker(objectListing.getNextMarker());
        } while (objectListing.isTruncated());
    } catch (AmazonS3Exception amazonS3Exception) {
        if (S3Operations.ERROR_CODE_NO_SUCH_BUCKET.equals(amazonS3Exception.getErrorCode())) {
            throw new IllegalArgumentException(
                    "The specified bucket '" + params.getS3BucketName() + "' does not exist.",
                    amazonS3Exception);
        }
        throw new IllegalStateException("Error accessing S3", amazonS3Exception);
    } catch (AmazonClientException e) {
        throw new IllegalStateException(
                String.format("Failed to list keys with prefix \"%s\" from bucket \"%s\". Reason: %s",
                        params.getS3KeyPrefix(), params.getS3BucketName(), e.getMessage()),
                e);
    } finally {
        // Shutdown the AmazonS3Client instance to release resources.
        s3Client.shutdown();
    }

    return s3ObjectSummaries;
}

From source file: org.finra.herd.dao.impl.S3DaoImpl.java

License: Apache License

@Override
public List<S3VersionSummary> listVersions(final S3FileTransferRequestParamsDto params) {
    Assert.isTrue(!isRootKeyPrefix(params.getS3KeyPrefix()),
            "Listing of S3 versions from root directory is not allowed.");

    AmazonS3Client s3Client = getAmazonS3(params);
    List<S3VersionSummary> s3VersionSummaries = new ArrayList<>();

    try {
        ListVersionsRequest listVersionsRequest = new ListVersionsRequest()
                .withBucketName(params.getS3BucketName()).withPrefix(params.getS3KeyPrefix());
        VersionListing versionListing;

        do {
            versionListing = s3Operations.listVersions(listVersionsRequest, s3Client);
            s3VersionSummaries.addAll(versionListing.getVersionSummaries());
            listVersionsRequest.setKeyMarker(versionListing.getNextKeyMarker());
            listVersionsRequest.setVersionIdMarker(versionListing.getNextVersionIdMarker());
        } while (versionListing.isTruncated());
    } catch (AmazonS3Exception amazonS3Exception) {
        if (S3Operations.ERROR_CODE_NO_SUCH_BUCKET.equals(amazonS3Exception.getErrorCode())) {
            throw new IllegalArgumentException(
                    "The specified bucket '" + params.getS3BucketName() + "' does not exist.",
                    amazonS3Exception);
        }
        throw new IllegalStateException("Error accessing S3", amazonS3Exception);
    } catch (AmazonClientException e) {
        throw new IllegalStateException(
                String.format("Failed to list S3 versions with prefix \"%s\" from bucket \"%s\". Reason: %s",
                        params.getS3KeyPrefix(), params.getS3BucketName(), e.getMessage()),
                e);
    } finally {
        // Shutdown the AmazonS3Client instance to release resources.
        s3Client.shutdown();
    }

    return s3VersionSummaries;
}

From source file: org.finra.herd.dao.impl.S3DaoImpl.java

License: Apache License

@Override
public void restoreObjects(final S3FileTransferRequestParamsDto params, int expirationInDays,
        String archiveRetrievalOption) {
    LOGGER.info("Restoring a list of objects in S3... s3KeyPrefix=\"{}\" s3BucketName=\"{}\" s3KeyCount={}",
            params.getS3KeyPrefix(), params.getS3BucketName(), params.getFiles().size());

    if (!CollectionUtils.isEmpty(params.getFiles())) {
        // Initialize a key for use in the error message in the catch block.
        String key = params.getFiles().get(0).getPath().replaceAll("\\\\", "/");

        try {
            // Create an S3 client.
            AmazonS3Client s3Client = getAmazonS3(params);

            // Create a restore object request.
            RestoreObjectRequest requestRestore = new RestoreObjectRequest(params.getS3BucketName(), null,
                    expirationInDays);
            // Make Bulk the default archive retrieval option if the option is not provided
            requestRestore.setGlacierJobParameters(new GlacierJobParameters()
                    .withTier(StringUtils.isNotEmpty(archiveRetrievalOption) ? archiveRetrievalOption
                            : Tier.Bulk.toString()));

            try {
                for (File file : params.getFiles()) {
                    key = file.getPath().replaceAll("\\\\", "/");
                    ObjectMetadata objectMetadata = s3Operations.getObjectMetadata(params.getS3BucketName(),
                            key, s3Client);

                    // Request a restore for objects that are not already being restored.
                    if (BooleanUtils.isNotTrue(objectMetadata.getOngoingRestore())) {
                        requestRestore.setKey(key);
                        s3Operations.restoreObject(requestRestore, s3Client);
                    }
                }
            } finally {
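                // Shutdown the AmazonS3Client instance to release resources.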
                s3Client.shutdown();
            }
        } catch (Exception e) {
            throw new IllegalStateException(String.format(
                    "Failed to initiate a restore request for \"%s\" key in \"%s\" bucket. Reason: %s", key,
                    params.getS3BucketName(), e.getMessage()), e);
        }
    }
}

From source file: org.finra.herd.dao.impl.S3DaoImpl.java

License: Apache License

@Override
public boolean s3FileExists(S3FileTransferRequestParamsDto params) throws RuntimeException {
    AmazonS3Client s3Client = getAmazonS3(params);

    try {
        S3Object s3Object = getS3Object(s3Client, params.getS3BucketName(), params.getS3KeyPrefix(), false);
        return (s3Object != null);
    } finally {
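        // Shutdown the AmazonS3Client instance to release resources.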
        s3Client.shutdown();
    }
}

From source file: org.finra.herd.dao.impl.S3DaoImpl.java

License: Apache License

@Override
public void validateGlacierS3FilesRestored(S3FileTransferRequestParamsDto params) throws RuntimeException {
    LOGGER.info(
            "Checking for already restored Glacier storage class objects... s3KeyPrefix=\"{}\" s3BucketName=\"{}\" s3KeyCount={}",
            params.getS3KeyPrefix(), params.getS3BucketName(), params.getFiles().size());

    if (!CollectionUtils.isEmpty(params.getFiles())) {
        // Initialize a key for use in the error message in the catch block.
        String key = params.getFiles().get(0).getPath().replaceAll("\\\\", "/");

        try {
            // Create an S3 client.
            AmazonS3Client s3Client = getAmazonS3(params);

            try {
                for (File file : params.getFiles()) {
                    key = file.getPath().replaceAll("\\\\", "/");
                    ObjectMetadata objectMetadata = s3Operations.getObjectMetadata(params.getS3BucketName(),
                            key, s3Client);

                    // Fail if an object that has not yet been restored is detected.
                    if (BooleanUtils.isNotFalse(objectMetadata.getOngoingRestore())) {
                        throw new IllegalArgumentException(String.format(
                                "Archived Glacier S3 file \"%s\" is not restored. StorageClass {%s}, OngoingRestore flag {%s}, S3 bucket name {%s}",
                                key, objectMetadata.getStorageClass(), objectMetadata.getOngoingRestore(),
                                params.getS3BucketName()));
                    }
                }
            } finally {
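                // Shutdown the AmazonS3Client instance to release resources.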
                s3Client.shutdown();
            }
        } catch (AmazonServiceException e) {
            throw new IllegalStateException(
                    String.format("Fail to check restore status for \"%s\" key in \"%s\" bucket. Reason: %s",
                            key, params.getS3BucketName(), e.getMessage()),
                    e);
        }
    }
}

From source file: org.finra.herd.dao.impl.S3DaoImpl.java

License: Apache License

private void tagVersionsHelper(final S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto,
        final S3FileTransferRequestParamsDto s3ObjectTaggerParamsDto,
        final List<S3VersionSummary> s3VersionSummaries, final Tag tag) {
    // Initialize an S3 version for the error message in the catch block.
    S3VersionSummary currentS3VersionSummary = s3VersionSummaries.get(0);

    // Amazon S3 client to access S3 objects.
    AmazonS3Client s3Client = null;

    // Amazon S3 client for S3 object tagging.
    AmazonS3Client s3ObjectTaggerClient = null;

    try {
        // Create an S3 client to access S3 objects.
        s3Client = getAmazonS3(s3FileTransferRequestParamsDto);

        // Create an S3 client for S3 object tagging.
        s3ObjectTaggerClient = getAmazonS3(s3ObjectTaggerParamsDto);

        // Create a get object tagging request.
        GetObjectTaggingRequest getObjectTaggingRequest = new GetObjectTaggingRequest(
                s3FileTransferRequestParamsDto.getS3BucketName(), null, null);

        // Create a set object tagging request.
        SetObjectTaggingRequest setObjectTaggingRequest = new SetObjectTaggingRequest(
                s3FileTransferRequestParamsDto.getS3BucketName(), null, null, null);

        for (S3VersionSummary s3VersionSummary : s3VersionSummaries) {
            // Set the current S3 version summary.
            currentS3VersionSummary = s3VersionSummary;

            // Retrieve the current tagging information for the S3 version.
            getObjectTaggingRequest.setKey(s3VersionSummary.getKey());
            getObjectTaggingRequest.setVersionId(s3VersionSummary.getVersionId());
            GetObjectTaggingResult getObjectTaggingResult = s3Operations
                    .getObjectTagging(getObjectTaggingRequest, s3Client);

            // Update the list of tags to include the specified S3 object tag.
            List<Tag> updatedTags = new ArrayList<>();
            updatedTags.add(tag);
            if (CollectionUtils.isNotEmpty(getObjectTaggingResult.getTagSet())) {
                for (Tag currentTag : getObjectTaggingResult.getTagSet()) {
                    if (!StringUtils.equals(tag.getKey(), currentTag.getKey())) {
                        updatedTags.add(currentTag);
                    }
                }
            }

            // Update tagging information for the S3 version.
            setObjectTaggingRequest.setKey(s3VersionSummary.getKey());
            setObjectTaggingRequest.setVersionId(s3VersionSummary.getVersionId());
            setObjectTaggingRequest.setTagging(new ObjectTagging(updatedTags));
            s3Operations.setObjectTagging(setObjectTaggingRequest, s3ObjectTaggerClient);
        }
    } catch (Exception e) {
        throw new IllegalStateException(String.format(
                "Failed to tag S3 object with \"%s\" key and \"%s\" version id in \"%s\" bucket. Reason: %s",
                currentS3VersionSummary.getKey(), currentS3VersionSummary.getVersionId(),
                s3FileTransferRequestParamsDto.getS3BucketName(), e.getMessage()), e);
    } finally {
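        // Shutdown the Amazon S3 client instances to release resources.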
        if (s3Client != null) {
            s3Client.shutdown();
        }

        if (s3ObjectTaggerClient != null) {
            s3ObjectTaggerClient.shutdown();
        }
    }
}

From source file: org.geowebcache.s3.S3BlobStore.java

License: Open Source License

@Override
public void destroy() {
    this.shutDown = true;
    AmazonS3Client conn = this.conn;
    this.conn = null;
    if (conn != null) {
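        // Shut down the helper operations object and release the client's resources.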
        s3Ops.shutDown();
        conn.shutdown();
    }
}

From source file: org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java

License: Open Source License

public static boolean checkAuth(String awsAccessKey, String awsSecretKey) {
    BasicAWSCredentials creds = null;
    try {
        creds = new BasicAWSCredentials(awsAccessKey, awsSecretKey);
        AmazonS3Client s3Service = new AmazonS3Client(creds);
        s3Service.listBuckets();
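        // Release the client's resources once the credential check has succeeded.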
        s3Service.shutdown();
        return true;
    } catch (Exception e) {
        SDFSLogger.getLog().fatal("Unable to authenticate to AWS", e);
        return false;
    }
}