Example usage for com.amazonaws.services.s3 AmazonS3Client AmazonS3Client

Introduction

On this page you can find example usage for the com.amazonaws.services.s3.AmazonS3Client constructor.

Prototype

@SdkInternalApi
AmazonS3Client(AmazonS3ClientParams s3ClientParams) 

Document

Constructs a new client to invoke service methods on S3 using the specified parameters.
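
The prototype listed above is the SDK-internal constructor; every example on this page goes through the public constructors that take an AWSCredentials or AWSCredentialsProvider instance. A minimal, self-contained sketch of that public usage (the credential literals, region, and sanity call are placeholders, not taken from any example below):

import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.Bucket;

public class S3ClientSketch {
    public static void main(String[] args) {
        // Placeholder credentials; real code should load these from a credentials provider.
        AWSCredentials credentials = new BasicAWSCredentials("ACCESS_KEY", "SECRET_KEY");

        // Construct the client and pin it to a region before making calls.
        AmazonS3 s3 = new AmazonS3Client(credentials);
        s3.setRegion(Region.getRegion(Regions.US_WEST_2));

        // Simple sanity check: list the buckets these credentials can see.
        for (Bucket bucket : s3.listBuckets()) {
            System.out.println(bucket.getName());
        }
    }
}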

Usage

From source file:com.athena.dolly.web.aws.s3.S3Service.java

License:Open Source License

@PostConstruct
public void init() {
    AWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
    s3 = new AmazonS3Client(credentials);
}
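
In later 1.11.x releases of the SDK the credential-taking constructors are deprecated in favor of AmazonS3ClientBuilder. A sketch of the equivalent construction for the init() method above, assuming the s3 field is declared as AmazonS3 (the region is an assumption; the original does not set one):

@PostConstruct
public void init() {
    // Builder-based construction (SDK 1.11.x+); accessKey/secretKey are the same fields as above.
    AWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
    s3 = AmazonS3ClientBuilder.standard()
            .withCredentials(new AWSStaticCredentialsProvider(credentials))
            .withRegion(Regions.US_EAST_1) // assumed region, not part of the original
            .build();
}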

From source file:com.atlantbh.jmeter.plugins.aws.s3.AWSS3Downloader.java

License:Apache License

@Override
public SampleResult sample(Entry arg0) {
    LOGGER.info("Download started....");
    SampleResult result = new SampleResult();
    result.setSampleLabel(getName());
    result.setDataType(SampleResult.TEXT);
    result.sampleStart();
    try {
        BasicAWSCredentials creds = new BasicAWSCredentials(getKey(), getSecret());
        AmazonS3 client = new AmazonS3Client(creds);
        S3Object s3Object = client.getObject(new GetObjectRequest(new S3ObjectId(getBucket(), getObject())));
        InputStream is = s3Object.getObjectContent();
        BufferedReader reader = new BufferedReader(new InputStreamReader(is));
        BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(new File(getDestination())));
        char[] buffer = new char[1024 * 1024];
        while (true) {
            int c = reader.read(buffer);
            if (c == -1) {
                break;
            }
            bufferedWriter.write(buffer, 0, c); // write only the characters actually read
        }
        reader.close();
        bufferedWriter.close();
        LOGGER.info("Download finished.");
        result.setResponseData("Download finished".getBytes());
        result.setSuccessful(true);
        result.setResponseCode("200");
        result.setResponseMessage("Downloaded");
    } catch (Exception e) {
        LOGGER.info("Download error.");
        result.setResponseData(("Download error: " + e.getMessage()).getBytes());
        result.setSuccessful(false);
        result.setResponseCode("500");
        result.setResponseMessage("Error");
    }
    result.sampleEnd();
    return result;
}
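
The sampler above leaks the reader and writer if an exception is thrown mid-copy. A hedged sketch of the same download using try-with-resources and Files.copy, so every stream is closed on all paths (the getKey()/getSecret()/getBucket()/getObject()/getDestination() accessors are the sampler's own; java.nio.file imports are assumed):

// Sketch: same download with automatic resource cleanup; would replace the body of the try block above.
AmazonS3 client = new AmazonS3Client(new BasicAWSCredentials(getKey(), getSecret()));
try (S3Object s3Object = client.getObject(new GetObjectRequest(new S3ObjectId(getBucket(), getObject())));
        InputStream in = s3Object.getObjectContent()) {
    // Stream the object body straight to the destination file.
    Files.copy(in, Paths.get(getDestination()), StandardCopyOption.REPLACE_EXISTING);
}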

From source file:com.atlantbh.jmeter.plugins.aws.s3.AWSS3Reader.java

License:Apache License

@Override
public SampleResult sample(Entry arg0) {
    LOGGER.info("Read started....");
    SampleResult result = new SampleResult();
    result.setSampleLabel(getName());
    result.setDataType(SampleResult.TEXT);
    result.sampleStart();
    try {
        BasicAWSCredentials creds = new BasicAWSCredentials(getKey(), getSecret());
        AmazonS3 client = new AmazonS3Client(creds);
        S3Object s3Object = client.getObject(new GetObjectRequest(new S3ObjectId(getBucket(), getObject())));
        InputStream is = s3Object.getObjectContent();
        BufferedReader reader = new BufferedReader(new InputStreamReader(is));

        String line;
        StringBuilder sb = new StringBuilder();
        while ((line = reader.readLine()) != null) {
            sb.append(line); // note: readLine() strips line separators, so they are not preserved in the response
        }
        result.setResponseData(sb.toString().getBytes());
        reader.close();
        LOGGER.info("Read finished.");
        result.setSuccessful(true);
        result.setResponseCode("200");
        result.setResponseMessage("Read done");
        result.setContentType("text/plain");
    } catch (Exception e) {
        LOGGER.info("Read error.");
        result.setResponseData(("Read error: " + e.getMessage()).getBytes());
        result.setSuccessful(false);
        result.setResponseCode("500");
        result.setResponseMessage("Error");
    }
    result.sampleEnd();
    return result;
}

From source file:com.atlantbh.jmeter.plugins.aws.s3.AWSS3Uploader.java

License:Apache License

@Override
public SampleResult sample(Entry arg0) {
    LOGGER.info("Upload started....");
    SampleResult result = new SampleResult();
    result.setSampleLabel(getName());
    result.setDataType(SampleResult.TEXT);
    result.sampleStart();
    try {
        BasicAWSCredentials creds = new BasicAWSCredentials(getKey(), getSecret());
        AmazonS3 client = new AmazonS3Client(creds);

        client.putObject(getBucket(), getDestination(), new File(getObject()));

        LOGGER.info("Upload finished.");
        result.setResponseData("Upload finished".getBytes());
        result.setSuccessful(true);
        result.setResponseCode("200");
        result.setResponseMessage("Uploaded");
    } catch (Exception e) {
        LOGGER.info("Upload error.");
        result.setResponseData(("Upload error: " + e.getMessage()).getBytes());
        result.setSuccessful(false);
        result.setResponseCode("500");
        result.setResponseMessage("Error");
    }
    result.sampleEnd();
    return result;
}
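
All three samplers construct a fresh AmazonS3Client inside sample(), which repeats client and connection-pool setup on every iteration. A common alternative is to create the client once per sampler and reuse it; a hedged sketch (getKey()/getSecret() are the sampler accessors used above):

// Sketch: lazily create one client per sampler instance instead of one per sample() call.
private volatile AmazonS3 client;

private AmazonS3 client() {
    if (client == null) {
        synchronized (this) {
            if (client == null) {
                client = new AmazonS3Client(new BasicAWSCredentials(getKey(), getSecret()));
            }
        }
    }
    return client;
}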

From source file:com.atlassian.localstack.sample.S3Sample.java

License:Open Source License

public static void runTest(AWSCredentials credentials) throws IOException {

    AmazonS3 s3 = new AmazonS3Client(credentials);
    Region usWest2 = Region.getRegion(Regions.US_WEST_2);
    s3.setRegion(usWest2);
    s3.setEndpoint(LocalstackTestRunner.getEndpointS3());

    String bucketName = "my-first-s3-bucket-" + UUID.randomUUID();
    String key = "MyObjectKey";

    System.out.println("===========================================");
    System.out.println("Getting Started with Amazon S3");
    System.out.println("===========================================\n");

    /*
     * Create a new S3 bucket - Amazon S3 bucket names are globally unique,
     * so once a bucket name has been taken by any user, you can't create
     * another bucket with that same name.
     *
     * You can optionally specify a location for your bucket if you want to
     * keep your data closer to your applications or users.
     */
    System.out.println("Creating bucket " + bucketName + "\n");
    s3.createBucket(bucketName);

    /*
     * List the buckets in your account
     */
    System.out.println("Listing buckets");
    for (Bucket bucket : s3.listBuckets()) {
        System.out.println(" - " + bucket.getName());
    }
    System.out.println();

    /*
     * Upload an object to your bucket - You can easily upload a file to
     * S3, or upload directly an InputStream if you know the length of
     * the data in the stream. You can also specify your own metadata
     * when uploading to S3, which allows you set a variety of options
     * like content-type and content-encoding, plus additional metadata
     * specific to your applications.
     */
    System.out.println("Uploading a new object to S3 from a file\n");
    s3.putObject(new PutObjectRequest(bucketName, key, createSampleFile()));

    /*
     * Download an object - When you download an object, you get all of
     * the object's metadata and a stream from which to read the contents.
     * It's important to read the contents of the stream as quickly as
     * possible since the data is streamed directly from Amazon S3 and your
     * network connection will remain open until you read all the data or
     * close the input stream.
     *
     * GetObjectRequest also supports several other options, including
     * conditional downloading of objects based on modification times,
     * ETags, and selectively downloading a range of an object.
     */
    System.out.println("Downloading an object");
    S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
    System.out.println("Content-Type: " + object.getObjectMetadata().getContentType());
    displayTextInputStream(object.getObjectContent());

    /*
     * List objects in your bucket by prefix - There are many options for
     * listing the objects in your bucket.  Keep in mind that buckets with
     * many objects might truncate their results when listing their objects,
     * so be sure to check if the returned object listing is truncated, and
     * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve
     * additional results.
     */
    System.out.println("Listing objects");
    ObjectListing objectListing = s3
            .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix("My"));
    for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
        System.out.println(" - " + objectSummary.getKey() + "  " + "(size = " + objectSummary.getSize() + ")");
    }
    System.out.println();

    /*
     * Delete an object - Unless versioning has been turned on for your bucket,
     * there is no way to undelete an object, so use caution when deleting objects.
     */
    System.out.println("Deleting an object\n");
    s3.deleteObject(bucketName, key);

    /*
     * Delete a bucket - A bucket must be completely empty before it can be
     * deleted, so remember to delete any objects from your buckets before
     * you try to delete them.
     */
    System.out.println("Deleting bucket " + bucketName + "\n");
    s3.deleteBucket(bucketName);
}
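
The listing comment in the sample above points at AmazonS3.listNextBatchOfObjects(...) for truncated results, but the code never pages. A hedged sketch of paging through a large bucket (s3 and bucketName as defined in runTest above):

// Sketch: page through a truncated listing, continuing until the last batch.
ObjectListing listing = s3.listObjects(new ListObjectsRequest().withBucketName(bucketName));
while (true) {
    for (S3ObjectSummary summary : listing.getObjectSummaries()) {
        System.out.println(" - " + summary.getKey() + "  (size = " + summary.getSize() + ")");
    }
    if (!listing.isTruncated()) {
        break;
    }
    listing = s3.listNextBatchOfObjects(listing);
}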

From source file:com.bye.project.S3TransferProgressSample.java

License:Open Source License

public static void main(String[] args) throws Exception {
    /*
     * This credentials provider implementation loads your AWS credentials
     * from a properties file at the root of your classpath.
     *
     * TransferManager manages a pool of threads, so we create a
     * single instance and share it throughout our application.
     */
    AmazonS3 s3 = new AmazonS3Client(credentials = new PropertiesCredentials(
            S3TransferProgressSample.class.getResourceAsStream("AwsCredentials.properties")));
    Region usWest2 = Region.getRegion(Regions.US_WEST_2);
    s3.setRegion(usWest2);
    tx = new TransferManager(s3);

    bucketName = "s3-upload-sdk-sample-" + credentials.getAWSAccessKeyId().toLowerCase();

    new S3TransferProgressSample();
}
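
The sample above only wires up the TransferManager; the class name suggests the upload and progress reporting happen elsewhere. A hedged sketch of what that typically looks like with SDK v1 (the key and local file path are placeholders):

// Sketch: upload a file through the TransferManager and poll its progress until done.
static void uploadWithProgress(TransferManager tx, String bucketName) throws Exception {
    Upload upload = tx.upload(bucketName, "sample-key", new File("/tmp/sample-data.bin"));
    while (!upload.isDone()) {
        System.out.printf("Progress: %.1f%%%n", upload.getProgress().getPercentTransferred());
        Thread.sleep(500);
    }
    System.out.println("Final state: " + upload.getState());
}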

From source file:com.carrotgarden.nexus.aws.s3.publish.amazon.AmazonServiceProvider.java

License:BSD License

@Override
public synchronized void start() {

    if (configBean == null) {
        throw new IllegalStateException("config is missing");
    }

    if (client == null) {

        client = new AmazonS3Client(new PluginCredentialsProvider());
        client.setEndpoint(configBean.endpoint());
    } else {
        // throw new IllegalStateException("client is present");
    }

    if (healthFuture == null) {
        final int period = configBean.healthPeriod();
        healthFuture = scheduler.scheduleAtFixedRate( //
                healtTask, 0, period, TimeUnit.SECONDS);
    } else {
        // throw new IllegalStateException("future is present");
    }

    checkCount = 0;

    isRunning = true;

    log.info("\n\t ### start");

}

From source file:com.casadocodigo.ecommerce.infra.AmazonFileSaver.java

public String write(Part part) {

    try {
        AmazonS3 s3client = new AmazonS3Client(new ProfileCredentialsProvider());

        String fileName = extractFilename(part.getHeader(CONTENT_DISPOSITION));

        File f = new File(fileName);

        // Copy the uploaded part to a local file. try-with-resources closes (and flushes)
        // both streams before the file is handed to S3, and also closes them on error.
        try (OutputStream out = new FileOutputStream(f);
                InputStream filecontent = part.getInputStream()) {

            int read;
            final byte[] bytes = new byte[1024];

            while ((read = filecontent.read(bytes)) != -1) {
                out.write(bytes, 0, read);
            }
        }

        s3client.putObject(BUCKET_NAME, fileName, f);

        return "";
    } catch (FileNotFoundException ex) {
        Logger.getLogger(AmazonFileSaver.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException | AmazonClientException ex) {
        Logger.getLogger(AmazonFileSaver.class.getName()).log(Level.SEVERE, null, ex);
    }
    return "";
}

From source file:com.casadocodigo.ecommerce.infra.AmazonFileSaver.java

public String write(String baseFolder, Part multipartFile) throws IOException {

    String fileName = extractFilename(multipartFile.getHeader(CONTENT_DISPOSITION));

    // S3 keys use "/" as the delimiter regardless of the local platform's File.separator.
    String path = baseFolder + "/" + fileName;

    AmazonS3 s3client = new AmazonS3Client(new ProfileCredentialsProvider());

    ObjectMetadata metaData = new ObjectMetadata();
    byte[] bytes = IOUtils.toByteArray(multipartFile.getInputStream());
    metaData.setContentLength(bytes.length);
    /*ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes);
    PutObjectRequest putObjectRequest = new PutObjectRequest(bucket, key, byteArrayInputStream, metadata);
    client.putObject(putObjectRequest);*/

    s3client.putObject(new PutObjectRequest(BUCKET_NAME, path, multipartFile.getInputStream(), metaData)
            .withCannedAcl(CannedAccessControlList.PublicRead));

    /*s3client.putObject(BUCKET_NAME,fileName, 
        multipartFile.getInputStream(), metaData);*/
    // Object URLs likewise always use "/" as the separator.
    return END_POINT + "/" + BUCKET_NAME + "/" + path;

}

From source file:com.cirrus.server.osgi.service.amazon.s3.AmazonS3StorageService.java

License:Apache License

@Override
public void authenticate(final AccessKeyPasswordAuthenticator trustedToken) {
    final String accessKey = trustedToken.getAccessKey();
    final String accessSecret = trustedToken.getAccessPassword();
    final AWSCredentials credentials = new BasicAWSCredentials(accessKey, accessSecret);
    this.amazonS3Client = new AmazonS3Client(credentials);
    this.createBucketIfNotExist();
}
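
The createBucketIfNotExist() helper called above is not shown on this page; a hedged sketch of such a helper (bucketName is an assumed field of the service, not part of the excerpt above):

// Hypothetical helper matching the call above.
private void createBucketIfNotExist() {
    if (!this.amazonS3Client.doesBucketExist(this.bucketName)) {
        this.amazonS3Client.createBucket(this.bucketName);
    }
}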