Example usage for com.amazonaws.services.s3.model ObjectMetadata ObjectMetadata

List of usage examples for com.amazonaws.services.s3.model ObjectMetadata ObjectMetadata

Introduction

On this page you can find example usage for com.amazonaws.services.s3.model ObjectMetadata ObjectMetadata.

Prototype

public ObjectMetadata() 
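
Before the per-project examples below, here is a minimal sketch of the constructor in use, assuming the AWS SDK for Java v1; the bucket name, object key, and content are placeholders invented for illustration. A fresh ObjectMetadata carries whatever headers you set on it (content length, content type, and so on) into the PutObjectRequest.

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

public class ObjectMetadataExample {
    public static void main(String[] args) {
        byte[] content = "example content".getBytes(StandardCharsets.UTF_8);

        // Create empty metadata and populate the headers this upload needs.
        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(content.length);
        metadata.setContentType("text/plain");

        // Attach the metadata to the put request; bucket and key are placeholders.
        PutObjectRequest putObjectRequest = new PutObjectRequest("my-example-bucket", "my/example/key.txt",
                new ByteArrayInputStream(content), metadata);

        AmazonS3 s3Client = AmazonS3ClientBuilder.defaultClient();
        s3Client.putObject(putObjectRequest);
    }
}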

Usage

From source file:org.finra.dm.dao.impl.MockS3OperationsImpl.java

License:Apache License

/**
 * <p>
 * Creates and returns a new {@link ObjectMetadata} with the given parameters. Content length defaults to 1 KB unless a hint is provided.
 * </p>
 * <p>
 * Takes the following hints when filePath is suffixed:
 * </p>
 * <dl>
 * <dt>AMAZON_THROTTLING_EXCEPTION</dt>
 * <dd>Throws an AmazonServiceException with the error code "ThrottlingException"</dd>
 * <dt>MOCK_S3_FILE_NAME_SERVICE_EXCEPTION</dt>
 * <dd>Throws an AmazonServiceException</dd>
 * <dt>MOCK_S3_FILE_NAME_NOT_FOUND</dt>
 * <dd>Throws an AmazonServiceException with status code SC_NOT_FOUND</dd>
 * <dt>MOCK_S3_FILE_NAME_0_BYTE_SIZE</dt>
 * <dd>Sets content length to 0 bytes</dd>
 * </dl>
 */
@Override
public ObjectMetadata getObjectMetadata(String sourceBucketName, String filePath, AmazonS3Client s3Client) {
    ObjectMetadata objectMetadata = new ObjectMetadata();

    if (filePath.endsWith(MockAwsOperationsHelper.AMAZON_THROTTLING_EXCEPTION)) {
        AmazonServiceException throttlingException = new AmazonServiceException("test throttling exception");
        throttlingException.setErrorCode("ThrottlingException");

        throw throttlingException;
    } else if (filePath.endsWith(MOCK_S3_FILE_NAME_SERVICE_EXCEPTION)) {
        throw new AmazonServiceException(null);
    } else if (filePath.endsWith(MOCK_S3_FILE_NAME_NOT_FOUND)) {
        AmazonServiceException exception = new AmazonServiceException(null);
        exception.setStatusCode(HttpStatus.SC_NOT_FOUND);
        throw exception;
    } else if (filePath.endsWith(MOCK_S3_FILE_NAME_0_BYTE_SIZE)) {
        objectMetadata.setContentLength(AbstractCoreTest.FILE_SIZE_0_BYTE);
    } else {
        objectMetadata.setContentLength(AbstractCoreTest.FILE_SIZE_1_KB);
    }

    return objectMetadata;
}
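
For contrast with this mock, a sketch of the real SDK call it stands in for could look like the following; the bucket and key names are placeholders, and the snippet simply reads a few headers back from the returned ObjectMetadata.

// Assumes the AWS SDK for Java v1 imports used throughout this page (com.amazonaws.services.s3.*).
public void printObjectMetadata(AmazonS3Client s3Client) {
    // HEAD the object and read a few headers back; bucket and key are placeholders.
    ObjectMetadata objectMetadata = s3Client.getObjectMetadata("my-example-bucket", "my/example/key.txt");
    System.out.println("Content length: " + objectMetadata.getContentLength());
    System.out.println("Content type: " + objectMetadata.getContentType());
    System.out.println("Last modified: " + objectMetadata.getLastModified());
}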

From source file:org.finra.dm.dao.impl.MockS3OperationsImpl.java

License:Apache License

/**
 * Puts an object into a bucket. Creates a new bucket if the bucket does not already exist.
 *
 * @throws IllegalArgumentException when there is an error reading from the input stream.
 */
@Override
public PutObjectResult putObject(PutObjectRequest putObjectRequest, AmazonS3Client s3Client) {
    LOGGER.debug("putObject(): putObjectRequest.getBucketName() = " + putObjectRequest.getBucketName()
            + ", putObjectRequest.getKey() = " + putObjectRequest.getKey());

    String s3BucketName = putObjectRequest.getBucketName();
    InputStream inputStream = putObjectRequest.getInputStream();

    ObjectMetadata metadata = putObjectRequest.getMetadata();
    if (metadata == null) {
        metadata = new ObjectMetadata();
    }

    File file = putObjectRequest.getFile();
    if (file != null) {
        try {
            inputStream = new FileInputStream(file);
            metadata.setContentLength(file.length());
        } catch (FileNotFoundException e) {
            throw new IllegalArgumentException("File not found " + file, e);
        }
    }
    String s3ObjectKey = putObjectRequest.getKey();

    byte[] s3ObjectData;
    try {
        s3ObjectData = IOUtils.toByteArray(inputStream);
    } catch (IOException e) {
        throw new IllegalArgumentException("Error converting input stream into byte array", e);
    } finally {
        try {
            inputStream.close();
        } catch (IOException e) {
            LOGGER.error("Error closing stream " + inputStream, e);
        }
    }

    MockS3Bucket mockS3Bucket = getOrCreateBucket(s3BucketName);

    MockS3Object mockS3Object = new MockS3Object();
    mockS3Object.setKey(s3ObjectKey);
    mockS3Object.setData(s3ObjectData);
    mockS3Object.setObjectMetadata(metadata);

    mockS3Bucket.getObjects().put(s3ObjectKey, mockS3Object);

    return new PutObjectResult();
}

From source file:org.finra.dm.dao.impl.S3DaoImpl.java

License:Apache License

@Override
public void createDirectory(final S3FileTransferRequestParamsDto params) {
    // Create metadata for the directory marker and set content-length to 0 bytes.
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(0);
    prepareMetadata(params, metadata);

    // Create empty content.
    InputStream emptyContent = new ByteArrayInputStream(new byte[0]);

    // Create a PutObjectRequest passing the folder name suffixed by '/'.
    String directoryName = params.getS3KeyPrefix() + (params.getS3KeyPrefix().endsWith("/") ? "" : "/");
    PutObjectRequest putObjectRequest = new PutObjectRequest(params.getS3BucketName(), directoryName,
            emptyContent, metadata);

    AmazonS3Client s3Client = null;

    try {
        s3Client = getAmazonS3(params);
        s3Operations.putObject(putObjectRequest, s3Client);
    } catch (AmazonServiceException e) {
        throw new IllegalStateException(
                String.format("Failed to create 0 byte S3 object with \"%s\" key in bucket \"%s\". Reason: %s",
                        directoryName, params.getS3BucketName(), e.getMessage()),
                e);
    } finally {
        // Shutdown the AmazonS3Client instance to release resources.
        if (s3Client != null) {
            s3Client.shutdown();
        }
    }
}
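
The "directory" here is only a naming convention: S3 has no real folders, so the DAO writes a zero-byte object whose key ends with "/". A stripped-down sketch of the same technique against the SDK directly (bucket name and key prefix are assumed parameters, not part of the project API) might look like this:

// Assumes AWS SDK for Java v1 imports plus java.io.ByteArrayInputStream.
public void createDirectoryMarker(AmazonS3 s3Client, String bucketName, String keyPrefix) {
    // A zero-length body with an explicit Content-Length of 0 is all a "folder" amounts to in S3.
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(0);

    String directoryKey = keyPrefix.endsWith("/") ? keyPrefix : keyPrefix + "/";
    s3Client.putObject(new PutObjectRequest(bucketName, directoryKey,
            new ByteArrayInputStream(new byte[0]), metadata));
}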

From source file:org.finra.dm.dao.impl.S3DaoImpl.java

License:Apache License

@Override
public S3FileTransferResultsDto uploadFile(final S3FileTransferRequestParamsDto params)
        throws InterruptedException {
    LOGGER.info(String.format("Uploading %s local file to s3://%s/%s ...", params.getLocalPath(),
            params.getS3BucketName(), params.getS3KeyPrefix()));

    // Perform the transfer.
    S3FileTransferResultsDto results = performTransfer(params, new Transferer() {
        @Override
        public Transfer performTransfer(TransferManager transferManager) {
            // Get a handle to the local file.
            File localFile = new File(params.getLocalPath());

            // Create and prepare the metadata.
            ObjectMetadata metadata = new ObjectMetadata();
            prepareMetadata(params, metadata);

            // Create a put request and a transfer manager with the parameters and the metadata.
            PutObjectRequest putObjectRequest = new PutObjectRequest(params.getS3BucketName(),
                    params.getS3KeyPrefix(), localFile);
            putObjectRequest.setMetadata(metadata);

            return s3Operations.upload(putObjectRequest, transferManager);
        }
    });

    LOGGER.info("Local file \"" + params.getLocalPath() + "\" contains " + results.getTotalBytesTransferred()
            + " byte(s) which was successfully transferred to S3 key prefix \"" + params.getS3KeyPrefix()
            + "\" in bucket \"" + params.getS3BucketName() + "\" in "
            + DmDateUtils.formatDuration(results.getDurationMillis(), true));

    LOGGER.info(String.format("Overall transfer rate: %.2f kBytes/s (%.2f Mbits/s)",
            getTransferRateInKilobytesPerSecond(results.getTotalBytesTransferred(),
                    results.getDurationMillis()),
            getTransferRateInMegabitsPerSecond(results.getTotalBytesTransferred(),
                    results.getDurationMillis())));

    return results;
}

From source file:org.finra.herd.dao.impl.MockS3OperationsImpl.java

License:Apache License

/**
 * {@inheritDoc}
 * <p/>
 * This implementation creates a new bucket if the bucket does not already exist.
 */
@Override
public PutObjectResult putObject(PutObjectRequest putObjectRequest, AmazonS3 s3Client) {
    LOGGER.debug("putObject(): putObjectRequest.getBucketName() = " + putObjectRequest.getBucketName()
            + ", putObjectRequest.getKey() = " + putObjectRequest.getKey());

    String s3BucketName = putObjectRequest.getBucketName();
    InputStream inputStream = putObjectRequest.getInputStream();

    ObjectMetadata metadata = putObjectRequest.getMetadata();
    if (metadata == null) {
        metadata = new ObjectMetadata();
    }

    File file = putObjectRequest.getFile();
    if (file != null) {
        try {
            inputStream = new FileInputStream(file);
            metadata.setContentLength(file.length());
        } catch (FileNotFoundException e) {
            throw new IllegalArgumentException("File not found " + file, e);
        }
    }

    String s3ObjectKey = putObjectRequest.getKey();
    String s3ObjectVersion = MOCK_S3_BUCKET_NAME_VERSIONING_ENABLED.equals(putObjectRequest.getBucketName())
            ? UUID.randomUUID().toString()
            : null;
    String s3ObjectKeyVersion = s3ObjectKey + (s3ObjectVersion != null ? s3ObjectVersion : "");

    byte[] s3ObjectData;
    try {
        s3ObjectData = IOUtils.toByteArray(inputStream);
        metadata.setContentLength(s3ObjectData.length);
    } catch (IOException e) {
        throw new IllegalArgumentException("Error converting input stream into byte array", e);
    } finally {
        try {
            inputStream.close();
        } catch (IOException e) {
            LOGGER.error("Error closing stream " + inputStream, e);
        }
    }

    // Update the Last-Modified header value. If it is not set, the S3Dao download-related unit tests fail with a NullPointerException.
    metadata.setLastModified(new Date());

    MockS3Bucket mockS3Bucket = getOrCreateBucket(s3BucketName);

    MockS3Object mockS3Object = new MockS3Object();
    mockS3Object.setKey(s3ObjectKey);
    mockS3Object.setVersion(s3ObjectVersion);
    mockS3Object.setData(s3ObjectData);
    mockS3Object.setObjectMetadata(metadata);

    if (putObjectRequest.getTagging() != null) {
        mockS3Object.setTags(putObjectRequest.getTagging().getTagSet());
    }

    mockS3Bucket.getObjects().put(s3ObjectKey, mockS3Object);
    mockS3Bucket.getVersions().put(s3ObjectKeyVersion, mockS3Object);

    return new PutObjectResult();
}

From source file:org.finra.herd.dao.impl.MockS3OperationsImpl.java

License:Apache License

@Override
public MultipleFileUpload uploadFileList(String bucketName, String virtualDirectoryKeyPrefix, File directory,
        List<File> files, ObjectMetadataProvider metadataProvider, TransferManager transferManager) {
    LOGGER.debug("uploadFileList(): bucketName = " + bucketName + ", virtualDirectoryKeyPrefix = "
            + virtualDirectoryKeyPrefix + ", directory = " + directory + ", files = " + files);

    String directoryPath = directory.getAbsolutePath();

    long totalFileLength = 0;
    List<Upload> subTransfers = new ArrayList<>();
    for (File file : files) {
        // Get path to file relative to the specified directory
        String relativeFilePath = file.getAbsolutePath().substring(directoryPath.length());

        // Replace any backslashes (i.e. Windows separator) with a forward slash.
        relativeFilePath = relativeFilePath.replace("\\", "/");

        // Remove any leading slashes
        relativeFilePath = relativeFilePath.replaceAll("^/+", "");

        long fileLength = file.length();

        // Remove any trailing slashes
        virtualDirectoryKeyPrefix = virtualDirectoryKeyPrefix.replaceAll("/+$", "");

        String s3ObjectKey = virtualDirectoryKeyPrefix + "/" + relativeFilePath;
        totalFileLength += fileLength;

        PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, s3ObjectKey, file);

        ObjectMetadata objectMetadata = new ObjectMetadata();
        metadataProvider.provideObjectMetadata(null, objectMetadata);
        putObjectRequest.setMetadata(objectMetadata);

        putObject(putObjectRequest, transferManager.getAmazonS3Client());

        subTransfers.add(new UploadImpl(null, null, null, null));
    }

    TransferProgress progress = new TransferProgress();
    progress.setTotalBytesToTransfer(totalFileLength);
    progress.updateProgress(totalFileLength);

    MultipleFileUploadImpl multipleFileUpload = new MultipleFileUploadImpl(null, progress, null,
            virtualDirectoryKeyPrefix, bucketName, subTransfers);
    multipleFileUpload.setState(TransferState.Completed);
    return multipleFileUpload;
}
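
The mock mirrors TransferManager.uploadFileList(), whose ObjectMetadataProvider callback receives a fresh ObjectMetadata for each file just before it is uploaded. A sketch of how the real call might be driven, with the bucket, prefix, directory, and file list as assumed inputs:

// Assumes AWS SDK for Java v1 transfer-manager imports (com.amazonaws.services.s3.transfer.*).
public MultipleFileUpload uploadWithMetadata(TransferManager transferManager, String bucketName,
        String keyPrefix, File directory, List<File> files) throws InterruptedException {
    // The provider is invoked once per file, just before that file's upload starts.
    ObjectMetadataProvider metadataProvider = (file, metadata) -> {
        metadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
    };

    MultipleFileUpload upload = transferManager.uploadFileList(bucketName, keyPrefix, directory, files,
            metadataProvider);
    upload.waitForCompletion();
    return upload;
}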

From source file:org.finra.herd.dao.impl.S3DaoImpl.java

License:Apache License

@Override
public S3FileTransferResultsDto copyFile(final S3FileCopyRequestParamsDto params) throws InterruptedException {
    LOGGER.info(
            "Copying S3 object... sourceS3Key=\"{}\" sourceS3BucketName=\"{}\" targetS3Key=\"{}\" targetS3BucketName=\"{}\"",
            params.getSourceObjectKey(), params.getSourceBucketName(), params.getTargetObjectKey(),
            params.getTargetBucketName());

    // Perform the copy.
    S3FileTransferResultsDto results = performTransfer(params, new Transferer() {
        @Override
        public Transfer performTransfer(TransferManager transferManager) {
            // Create a copy request.
            CopyObjectRequest copyObjectRequest = new CopyObjectRequest(params.getSourceBucketName(),
                    params.getSourceObjectKey(), params.getTargetBucketName(), params.getTargetObjectKey());

            // If KMS Key ID is specified, set the AWS Key Management System parameters to be used to encrypt the object.
            if (StringUtils.isNotBlank(params.getKmsKeyId())) {
                copyObjectRequest
                        .withSSEAwsKeyManagementParams(new SSEAwsKeyManagementParams(params.getKmsKeyId()));
            }
            // Otherwise, specify the server-side encryption algorithm for encrypting the object using AWS-managed keys.
            else {
                ObjectMetadata metadata = new ObjectMetadata();
                metadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
                copyObjectRequest.setNewObjectMetadata(metadata);
            }

            return s3Operations.copyFile(copyObjectRequest, transferManager);
        }
    });

    LOGGER.info(
            "Copied S3 object. sourceS3Key=\"{}\" sourceS3BucketName=\"{}\" targetS3Key=\"{}\" targetS3BucketName=\"{}\" "
                    + "totalBytesTransferred={} transferDuration=\"{}\"",
            params.getSourceObjectKey(), params.getSourceBucketName(), params.getTargetObjectKey(),
            params.getTargetBucketName(), results.getTotalBytesTransferred(),
            HerdDateUtils.formatDuration(results.getDurationMillis()));

    logOverallTransferRate(results);

    return results;
}
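
The same encryption choice appears on uploads: SSE-S3 is requested through the ObjectMetadata, while SSE-KMS bypasses the metadata and is carried by SSEAwsKeyManagementParams on the request. A sketch under assumed bucket, key, file, and KMS key ID values:

// Assumes AWS SDK for Java v1 imports (com.amazonaws.services.s3.model.*).
public void buildEncryptedPutRequests(String bucketName, String key, File localFile, String kmsKeyId) {
    // SSE-S3 (AWS-managed keys): request AES-256 through the object metadata.
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
    PutObjectRequest sseS3Request = new PutObjectRequest(bucketName, key, localFile);
    sseS3Request.setMetadata(metadata);

    // SSE-KMS: the key ID travels in SSEAwsKeyManagementParams, not in the metadata.
    PutObjectRequest sseKmsRequest = new PutObjectRequest(bucketName, key, localFile);
    sseKmsRequest.setSSEAwsKeyManagementParams(new SSEAwsKeyManagementParams(kmsKeyId));

    // Either request can then be handed to AmazonS3.putObject(...) or TransferManager.upload(...).
}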

From source file:org.finra.herd.dao.impl.S3DaoImpl.java

License:Apache License

@Override
public void createDirectory(final S3FileTransferRequestParamsDto params) {
    // Create metadata for the directory marker and set content-length to 0 bytes.
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(0);
    prepareMetadata(params, metadata);

    // Create empty content.
    InputStream emptyContent = new ByteArrayInputStream(new byte[0]);

    // Create a PutObjectRequest passing the folder name suffixed by '/'.
    String directoryName = StringUtils.appendIfMissing(params.getS3KeyPrefix(), "/");
    PutObjectRequest putObjectRequest = new PutObjectRequest(params.getS3BucketName(), directoryName,
            emptyContent, metadata);
    // KMS key ID is being set through prepareMetadata()

    AmazonS3Client s3Client = getAmazonS3(params);

    try {
        s3Operations.putObject(putObjectRequest, s3Client);
    } catch (AmazonServiceException e) {
        throw new IllegalStateException(
                String.format("Failed to create 0 byte S3 object with \"%s\" key in bucket \"%s\". Reason: %s",
                        directoryName, params.getS3BucketName(), e.getMessage()),
                e);
    } finally {
        // Shutdown the AmazonS3Client instance to release resources.
        s3Client.shutdown();
    }
}

From source file:org.finra.herd.dao.impl.S3DaoImpl.java

License:Apache License

@Override
public S3FileTransferResultsDto uploadFile(final S3FileTransferRequestParamsDto params)
        throws InterruptedException {
    LOGGER.info("Uploading local file to S3... localPath=\"{}\" s3Key=\"{}\" s3BucketName=\"{}\"",
            params.getLocalPath(), params.getS3KeyPrefix(), params.getS3BucketName());

    // Perform the transfer.
    S3FileTransferResultsDto results = performTransfer(params, new Transferer() {
        @Override
        public Transfer performTransfer(TransferManager transferManager) {
            // Get a handle to the local file.
            File localFile = new File(params.getLocalPath());

            // Create and prepare the metadata.
            ObjectMetadata metadata = new ObjectMetadata();
            prepareMetadata(params, metadata);

            // Create a put request and a transfer manager with the parameters and the metadata.
            PutObjectRequest putObjectRequest = new PutObjectRequest(params.getS3BucketName(),
                    params.getS3KeyPrefix(), localFile);
            putObjectRequest.setMetadata(metadata);

            return s3Operations.upload(putObjectRequest, transferManager);
        }
    });

    LOGGER.info(
            "Uploaded local file to the S3. localPath=\"{}\" s3Key=\"{}\" s3BucketName=\"{}\" totalBytesTransferred={} transferDuration=\"{}\"",
            params.getLocalPath(), params.getS3KeyPrefix(), params.getS3BucketName(),
            results.getTotalBytesTransferred(), HerdDateUtils.formatDuration(results.getDurationMillis()));

    logOverallTransferRate(results);

    return results;
}

From source file:org.finra.herd.service.BusinessObjectDataServiceTestHelper.java

License:Apache License

/**
 * Creates an object in S3 with the key prefix constructed from the given parameters. The object's full key will be {s3KeyPrefix}/test.
 *
 * @param businessObjectFormatEntity business object format
 * @param request request with partition values and storage
 * @param businessObjectDataVersion business object data version to put
 */
public void createS3Object(BusinessObjectFormatEntity businessObjectFormatEntity,
        BusinessObjectDataInvalidateUnregisteredRequest request, int businessObjectDataVersion) {
    StorageEntity storageEntity = storageDao.getStorageByName(request.getStorageName());
    String s3BucketName = storageHelper.getS3BucketAccessParams(storageEntity).getS3BucketName();

    BusinessObjectDataKey businessObjectDataKey = getBusinessObjectDataKey(request);
    businessObjectDataKey.setBusinessObjectDataVersion(businessObjectDataVersion);

    String s3KeyPrefix = s3KeyPrefixHelper.buildS3KeyPrefix(AbstractServiceTest.S3_KEY_PREFIX_VELOCITY_TEMPLATE,
            businessObjectFormatEntity, businessObjectDataKey, storageEntity.getName());
    String s3ObjectKey = s3KeyPrefix + "/test";
    PutObjectRequest putObjectRequest = new PutObjectRequest(s3BucketName, s3ObjectKey,
            new ByteArrayInputStream(new byte[1]), new ObjectMetadata());
    s3Operations.putObject(putObjectRequest, null);
}
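
Note that this last example passes a bare new ObjectMetadata() together with an InputStream; without a content length the SDK has to buffer the stream in memory to determine the size and logs a warning about it. A sketch (bucket and key are placeholders) of setting the length explicitly:

// Assumes AWS SDK for Java v1 imports plus java.io.ByteArrayInputStream.
public void putBytesWithExplicitLength(AmazonS3 s3Client, String bucketName, String key, byte[] data) {
    // Setting Content-Length up front lets the SDK stream the payload instead of buffering it in memory.
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(data.length);

    s3Client.putObject(new PutObjectRequest(bucketName, key, new ByteArrayInputStream(data), metadata));
}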