Example usage for com.amazonaws.services.s3 AmazonS3Client AmazonS3Client

List of usage examples for com.amazonaws.services.s3 AmazonS3Client AmazonS3Client

Introduction

On this page you can find example usage for com.amazonaws.services.s3 AmazonS3Client AmazonS3Client.

Prototype

@SdkInternalApi
AmazonS3Client(AmazonS3ClientParams s3ClientParams) 

Source Link

Document

Constructs a new client to invoke service methods on S3 using the specified parameters.

Usage

From source file:org.cloudml.connectors.BeanstalkConnector.java

License:Open Source License

/**
 * Uploads the given WAR to the Beanstalk-provided S3 storage bucket and
 * registers it as a new application version (auto-creating the application
 * if it does not exist yet).
 *
 * @param warFile         the WAR archive to upload
 * @param versionLabel    label under which the version is registered
 * @param applicationName name of the Beanstalk application to attach the version to
 */
public void prepareWar(File warFile, String versionLabel, String applicationName) {
    AmazonS3 s3 = new AmazonS3Client(awsCredentials);
    String bucketName = beanstalkClient.createStorageLocation().getS3Bucket();
    String key;
    try {
        // Key combines file name and version label so repeated uploads of the
        // same WAR under different versions do not collide.
        key = URLEncoder.encode(warFile.getName() + "-" + versionLabel, "UTF-8");
        s3.putObject(bucketName, key, warFile);
        beanstalkClient.createApplicationVersion(new CreateApplicationVersionRequest()
                .withApplicationName(applicationName).withAutoCreateApplication(true)
                .withVersionLabel(versionLabel).withSourceBundle(new S3Location(bucketName, key)));
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is mandatory on every JVM, so this branch is effectively
        // unreachable; if it ever fires, the failure is logged and no
        // application version is created.
        journal.log(Level.SEVERE, e.getMessage());
    }
}

From source file:org.cm.podd.urban.report.helper.Util.java

License:Open Source License

/**
 * Gets an instance of a S3 client which is constructed using the given
 * Context./*w  ww. j av a2  s .  c  o  m*/
 *
 * @param context An Context instance.
 * @return A default S3 client.
 */
/**
 * Gets the shared S3 client, lazily constructing it on first use.
 *
 * <p>Declared {@code synchronized} so that concurrent first calls cannot race
 * on the lazy initialization of {@code sS3Client} and construct two clients
 * (the original check-then-assign was not thread-safe).
 *
 * <p>NOTE(review): the access/secret keys are compiled into the app via
 * {@code BuildConfig}, so anyone with the APK can extract them. Prefer
 * temporary credentials (e.g. Cognito/STS) for production builds.
 *
 * @param context A Context instance (currently unused — kept for interface
 *                compatibility with callers).
 * @return the process-wide S3 client.
 */
public static synchronized AmazonS3Client getS3Client(Context context) {
    if (sS3Client == null) {
        // Static credentials sourced from build-time configuration.
        AWSCredentials cr = new AWSCredentials() {
            @Override
            public String getAWSAccessKeyId() {
                return BuildConfig.S3_ACCESS_KEY;
            }

            @Override
            public String getAWSSecretKey() {
                return BuildConfig.S3_SECRET_KEY;
            }
        };

        sS3Client = new AmazonS3Client(cr);
    }
    return sS3Client;
}

From source file:org.commoncrawl.service.parser.ec2.EC2ParserMaster.java

License:Open Source License

/**
 * Scans the {@code aws-publicdatasets} bucket for crawl-log keys and queues
 * any previously unseen ones as parse candidates.
 *
 * <p>Pages through the listing until exhausted or until {@code shutdownFlag}
 * is set. Candidate bookkeeping ({@code _complete}, {@code _candidates},
 * {@code _active}) is guarded by {@code synchronized (this)}.
 *
 * @param initialScan when true, also runs {@link #scanForCompletions()} after
 *                    the listing pass to pick up work finished earlier
 * @return true if the scan completed, false if it failed with an IOException
 * @throws IOException declared for interface compatibility; NOTE(review): the
 *         body catches IOException itself, so it appears this is never
 *         actually thrown — confirm before relying on it
 */
private boolean doScan(boolean initialScan) throws IOException {
    try {
        LOG.info("Scanner Thread Starting");
        AmazonS3Client s3Client = new AmazonS3Client(new BasicAWSCredentials(s3AccessKeyId, s3SecretKey));

        ObjectListing response = s3Client.listObjects(new ListObjectsRequest()
                .withBucketName("aws-publicdatasets").withPrefix(CC_BUCKET_ROOT + CC_CRAWLLOG_SOURCE));

        do {

            LOG.info("Response Key Count:" + response.getObjectSummaries().size());

            for (S3ObjectSummary entry : response.getObjectSummaries()) {

                // Only keys matching the crawl-log naming pattern are considered.
                Matcher matcher = crawlLogPattern.matcher(entry.getKey());
                if (matcher.matches()) {
                    ParseCandidate candidate = ParseCandidate.candidateFromBucketEntry(entry.getKey());
                    if (candidate == null) {
                        LOG.error("Failed to Parse Candidate for:" + entry.getKey());
                    } else {
                        LOG.info("Candidate is:" + candidate);
                        synchronized (this) {
                            if (_complete.contains(candidate._crawlLogName)) {
                                LOG.info("Skipping completed Candidate:" + candidate);
                            } else {
                                // Queue only if not already queued or being worked on.
                                if (!_candidates.containsEntry(candidate._timestamp, candidate)
                                        && !_active.containsKey(candidate)) {
                                    // update candidate size here ... 
                                    candidate._size = entry.getSize();
                                    LOG.info("New Candidate:" + candidate._crawlLogName + " Found");
                                    _candidates.put(candidate._timestamp, candidate);
                                } else {
                                    LOG.info("Skipping Existing Candidate:" + candidate._crawlLogName);
                                }
                            }
                        }
                    }
                }
            }

            // Follow listing pagination until the last page.
            if (response.isTruncated()) {
                response = s3Client.listNextBatchOfObjects(response);
            } else {
                break;
            }
        } while (!shutdownFlag.get());

        if (initialScan) {
            // search for completions 
            synchronized (this) {
                scanForCompletions();
            }
        }

        return true;
    } catch (IOException e) {
        LOG.error(CCStringUtils.stringifyException(e));
        return false;
    }
}

From source file:org.commoncrawl.service.parser.ec2.EC2ParserMaster.java

License:Open Source License

/**
 * Scans the parser intermediate output for "done" markers and advances the
 * last-valid position of matching parse candidates, moving fully consumed
 * crawl logs from the candidate set into the completed set.
 *
 * <p>NOTE(review): callers appear expected to hold the monitor on
 * {@code this} while invoking this method ({@code doScan} does) — confirm
 * before adding new call sites.
 *
 * @throws IOException declared for interface compatibility; S3 failures
 *         surface as runtime AmazonClientException
 */
public void scanForCompletions() throws IOException {
    AmazonS3Client s3Client = new AmazonS3Client(new BasicAWSCredentials(s3AccessKeyId, s3SecretKey));

    ObjectListing response = s3Client.listObjects(new ListObjectsRequest().withBucketName("aws-publicdatasets")
            .withPrefix(CC_BUCKET_ROOT + CC_PARSER_INTERMEDIATE));

    do {

        LOG.info("Response Key Count:" + response.getObjectSummaries().size());

        for (S3ObjectSummary entry : response.getObjectSummaries()) {
            Matcher matcher = doneFilePattern.matcher(entry.getKey());
            if (matcher.matches()) {
                ParseCandidate candidate = ParseCandidate.candidateFromBucketEntry(entry.getKey());
                if (candidate == null) {
                    LOG.error("Failed to Parse Candidate for:" + entry.getKey());
                } else {
                    long partialTimestamp = Long.parseLong(matcher.group(2));
                    long position = Long.parseLong(matcher.group(3));
                    LOG.info("Found completion for Log:" + candidate._crawlLogName + " TS:" + partialTimestamp
                            + " Pos:" + position);
                    candidate._lastValidPos = position;

                    // BUGFIX: the two-argument Iterables.find throws
                    // NoSuchElementException when no element matches, so the
                    // null check below could never take its else branch and a
                    // completion without a known candidate crashed the scan.
                    // The three-argument overload returns the supplied default
                    // (null) instead.
                    ParseCandidate existingCandidate = Iterables.find(_candidates.get(candidate._timestamp),
                            Predicates.equalTo(candidate), null);
                    // if existing candidate found 
                    if (existingCandidate != null) {
                        LOG.info("Found existing candidate with last pos:" + existingCandidate._lastValidPos);
                        if (candidate._lastValidPos > existingCandidate._lastValidPos) {
                            existingCandidate._lastValidPos = candidate._lastValidPos;
                            // Fully consumed: retire from candidates, record as complete.
                            if (candidate._lastValidPos == candidate._size) {
                                LOG.info("Found last pos == size for candidate:" + candidate._crawlLogName
                                        + ".REMOVING FROM ACTIVE - MOVING TO COMPLETE");
                                _candidates.remove(candidate._timestamp, candidate);
                                _complete.add(candidate._crawlLogName);
                            }
                        }
                    } else {
                        LOG.info("Skipping Completion for CrawlLog:" + candidate._crawlLogName
                                + " because existing candidate was not found.");
                    }
                }
            }
        }
        if (response.isTruncated()) {
            response = s3Client.listNextBatchOfObjects(response);
        } else {
            break;
        }
    } while (true);
}

From source file:org.commoncrawl.util.EC2MetadataTransferUtil.java

License:Open Source License

/**
 * Lists every S3 object under the given segment path whose key begins with
 * the "metadata-" prefix, following listing pagination to the end.
 *
 * @param s3AccessKeyId AWS access key id
 * @param s3SecretKey   AWS secret key
 * @param bucketName    bucket to list
 * @param segmentPath   key prefix of the segment; "metadata-" is appended
 * @return an immutable list of all matching object summaries
 * @throws IOException declared for interface compatibility
 */
public static List<S3ObjectSummary> getMetadataPaths(String s3AccessKeyId, String s3SecretKey,
        String bucketName, String segmentPath) throws IOException {

    AmazonS3Client client = new AmazonS3Client(new BasicAWSCredentials(s3AccessKeyId, s3SecretKey));

    ImmutableList.Builder<S3ObjectSummary> results = new ImmutableList.Builder<S3ObjectSummary>();

    String metadataPrefix = segmentPath + "metadata-";
    LOG.info("Prefix Search Key is:" + metadataPrefix);

    ObjectListing listing = client
            .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix(metadataPrefix));

    while (true) {
        LOG.info("Response Key Count:" + listing.getObjectSummaries().size());

        for (S3ObjectSummary summary : listing.getObjectSummaries()) {
            results.add(summary);
        }

        // Stop on the final page; otherwise fetch the next batch.
        if (!listing.isTruncated()) {
            break;
        }
        listing = client.listNextBatchOfObjects(listing);
    }

    return results.build();
}

From source file:org.commoncrawl.util.S3BulkTransferUtil.java

License:Open Source License

/**
 * Lists every S3 object in the given bucket under the given key prefix,
 * following listing pagination to the end.
 *
 * @param s3AccessKeyId AWS access key id
 * @param s3SecretKey   AWS secret key
 * @param bucketName    bucket to list
 * @param segmentPath   key prefix to restrict the listing to
 * @return an immutable list of all matching object summaries
 * @throws IOException declared for interface compatibility
 */
public static List<S3ObjectSummary> getPaths(String s3AccessKeyId, String s3SecretKey, String bucketName,
        String segmentPath) throws IOException {

    AmazonS3Client client = new AmazonS3Client(new BasicAWSCredentials(s3AccessKeyId, s3SecretKey));

    ImmutableList.Builder<S3ObjectSummary> results = new ImmutableList.Builder<S3ObjectSummary>();

    ObjectListing listing = client
            .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix(segmentPath));

    for (;;) {
        LOG.info("Response Key Count:" + listing.getObjectSummaries().size());

        // Collect this page of summaries in one shot.
        results.addAll(listing.getObjectSummaries());

        if (!listing.isTruncated()) {
            break;
        }
        listing = client.listNextBatchOfObjects(listing);
    }

    return results.build();
}

From source file:org.crypto.sse.IEX2LevAMAZON.java

License:Open Source License

/**
 * @param args//www  .  j  a  v  a 2 s.co  m
 * @throws Exception
 */
@SuppressWarnings("null")
public static void main(String[] args) throws Exception {

    //First Job
    Configuration conf = new Configuration();

    Job job = Job.getInstance(conf, "IEX-2Lev");

    job.setJarByClass(IEX2LevAMAZON.class);

    job.setMapperClass(MLK1.class);

    job.setReducerClass(RLK1.class);

    job.setMapOutputKeyClass(Text.class);

    job.setMapOutputValueClass(Text.class);

    job.setOutputKeyClass(Text.class);

    job.setNumReduceTasks(1);

    job.setOutputValueClass(ArrayListWritable.class);

    job.setInputFormatClass(FileNameKeyInputFormat.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    //Second Job
    Configuration conf2 = new Configuration();

    Job job2 = Job.getInstance(conf2, "IEX-2Lev");

    job2.setJarByClass(IEX2LevAMAZON.class);

    job2.setMapperClass(MLK2.class);

    job2.setReducerClass(RLK2.class);

    job2.setNumReduceTasks(1);

    job2.setMapOutputKeyClass(Text.class);

    job2.setMapOutputValueClass(Text.class);

    job2.setOutputKeyClass(Text.class);

    job2.setOutputValueClass(ArrayListWritable.class);

    job2.setInputFormatClass(FileNameKeyInputFormat.class);

    FileInputFormat.addInputPath(job2, new Path(args[0]));
    FileOutputFormat.setOutputPath(job2, new Path(args[2]));

    job.waitForCompletion(true);
    job2.waitForCompletion(true);

    //Here add your Amazon Credentials

    AWSCredentials credentials = new BasicAWSCredentials("XXXXXXXXXXXXXXXX", "XXXXXXXXXXXXXXXX");
    // create a client connection based on credentials
    AmazonS3 s3client = new AmazonS3Client(credentials);

    // create bucket - name must be unique for all S3 users
    String bucketName = "iexmaptest";

    S3Object s3object = s3client.getObject(new GetObjectRequest(bucketName, args[4]));
    System.out.println(s3object.getObjectMetadata().getContentType());
    System.out.println(s3object.getObjectMetadata().getContentLength());
    List<String> lines = new ArrayList<String>();

    String folderName = "2";

    BufferedReader reader = new BufferedReader(new InputStreamReader(s3object.getObjectContent()));
    String line;
    int counter = 0;
    while ((line = reader.readLine()) != null) {
        // can copy the content locally as well
        // using a buffered writer
        lines.add(line);
        System.out.println(line);
        // upload file to folder 
        String fileName = folderName + "/" + Integer.toString(counter);
        ByteArrayInputStream input = new ByteArrayInputStream(line.getBytes());
        s3client.putObject(bucketName, fileName, input, new ObjectMetadata());
        counter++;
    }

    Multimap<String, String> lookup = ArrayListMultimap.create();

    for (int i = 0; i < lines.size(); i++) {
        String[] tokens = lines.get(i).split("\\s+");
        for (int j = 1; j < tokens.length; j++) {
            lookup.put(tokens[0], tokens[j]);
        }
    }

    // Loading inverted index that associates files identifiers to keywords
    lines = new ArrayList<String>();
    s3object = s3client.getObject(new GetObjectRequest(bucketName, args[5]));
    System.out.println(s3object.getObjectMetadata().getContentType());
    System.out.println(s3object.getObjectMetadata().getContentLength());

    // Loading inverted index that associates keywords to identifiers

    reader = new BufferedReader(new InputStreamReader(s3object.getObjectContent()));
    while ((line = reader.readLine()) != null) {
        lines.add(line);
    }
    Multimap<String, String> lookup2 = ArrayListMultimap.create();
    for (int i = 0; i < lines.size(); i++) {
        String[] tokens = lines.get(i).split("\\s+");
        for (int j = 1; j < tokens.length; j++) {
            lookup2.put(tokens[0], tokens[j]);
        }
    }

    // Delete File
    try {
        s3client.deleteObject(new DeleteObjectRequest(bucketName, args[4]));
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException.");
        System.out.println("Error Message: " + ace.getMessage());
    }

    /*
     * Start of IEX-2Lev construction
     */

    // Generation of keys for IEX-2Lev
    BufferedReader keyRead = new BufferedReader(new InputStreamReader(System.in));
    System.out.println("Enter your password :");
    String pass = keyRead.readLine();

    // You can change the size of the key; Here we set it to 128

    List<byte[]> listSK = IEX2Lev.keyGen(128, pass, "salt/salt", 100);

    // Generation of Local Multi-maps with Mapper job only without reducer

    Configuration conf3 = new Configuration();

    String testSerialization1 = new String(Base64.encodeBase64(Serializer.serialize(lookup)));
    String testSerialization2 = new String(Base64.encodeBase64(Serializer.serialize(lookup2)));

    String testSerialization3 = new String(Base64.encodeBase64(Serializer.serialize(listSK)));

    //String testSerialization2 = gson.toJson(lookup2);
    conf3.set("lookup", testSerialization1);
    conf3.set("lookup2", testSerialization2);
    conf3.set("setKeys", testSerialization3);

    Job job3 = Job.getInstance(conf3, "Local MM");

    job3.setJarByClass(IEX2LevAMAZON.class);

    job3.setMapperClass(LocalMM.class);

    job3.setNumReduceTasks(0);

    FileInputFormat.addInputPath(job3, new Path(args[2]));
    FileOutputFormat.setOutputPath(job3, new Path(args[3]));

    job3.waitForCompletion(true);

}

From source file:org.cto.VVS3Box.S3Sample.java

License:Open Source License

public static void main(String[] args) throws IOException {
    /*/*w  ww.  jav a2  s . c  o  m*/
     * This credentials provider implementation loads your AWS credentials
     * from a properties file at the root of your classpath.
     *
     * Important: Be sure to fill in your AWS access credentials in the
     *            AwsCredentials.properties file before you try to run this
     *            sample.
     * http://aws.amazon.com/security-credentials
     */
    AmazonS3 s3 = new AmazonS3Client(new ClasspathPropertiesFileCredentialsProvider());
    Region usWest2 = Region.getRegion(Regions.US_WEST_2);
    s3.setRegion(usWest2);

    String bucketName = "lior.test-" + UUID.randomUUID();
    String key = "MyObjectKey";

    System.out.println("===========================================");
    System.out.println("Getting Started with Amazon S3");
    System.out.println("===========================================\n");

    try {
        /*
         * Create a new S3 bucket - Amazon S3 bucket names are globally unique,
         * so once a bucket name has been taken by any user, you can't create
         * another bucket with that same name.
         *
         * You can optionally specify a location for your bucket if you want to
         * keep your data closer to your applications or users.
         */
        System.out.println("Creating bucket " + bucketName + "\n");
        s3.createBucket(bucketName);

        /*
         * List the buckets in your account
         */
        System.out.println("Listing buckets");
        for (Bucket bucket : s3.listBuckets()) {
            System.out.println(" - " + bucket.getName());
        }
        System.out.println();

        /*
         * Upload an object to your bucket - You can easily upload a file to
         * S3, or upload directly an InputStream if you know the length of
         * the data in the stream. You can also specify your own metadata
         * when uploading to S3, which allows you set a variety of options
         * like content-type and content-encoding, plus additional metadata
         * specific to your applications.
         */
        System.out.println("Uploading a new object to S3 from a file\n");
        s3.putObject(new PutObjectRequest(bucketName, key, createSampleFile()));

        /*
         * Download an object - When you download an object, you get all of
         * the object's metadata and a stream from which to read the contents.
         * It's important to read the contents of the stream as quickly as
         * possibly since the data is streamed directly from Amazon S3 and your
         * network connection will remain open until you read all the data or
         * close the input stream.
         *
         * GetObjectRequest also supports several other options, including
         * conditional downloading of objects based on modification times,
         * ETags, and selectively downloading a range of an object.
         */
        System.out.println("Downloading an object");
        S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
        System.out.println("Content-Type: " + object.getObjectMetadata().getContentType());
        displayTextInputStream(object.getObjectContent());

        /*
         * List objects in your bucket by prefix - There are many options for
         * listing the objects in your bucket.  Keep in mind that buckets with
         * many objects might truncate their results when listing their objects,
         * so be sure to check if the returned object listing is truncated, and
         * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve
         * additional results.
         */
        System.out.println("Listing objects");
        ObjectListing objectListing = s3
                .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix("My"));
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            System.out.println(
                    " - " + objectSummary.getKey() + "  " + "(size = " + objectSummary.getSize() + ")");
        }
        System.out.println();

        /*
         * Delete an object - Unless versioning has been turned on for your bucket,
         * there is no way to undelete an object, so use caution when deleting objects.
         */
        System.out.println("Deleting an object\n");
        s3.deleteObject(bucketName, key);

        /*
         * Delete a bucket - A bucket must be completely empty before it can be
         * deleted, so remember to delete any objects from your buckets before
         * you try to delete them.
         */
        System.out.println("Deleting bucket " + bucketName + "\n");
        s3.deleteBucket(bucketName);
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}

From source file:org.davidmendoza.fileUpload.web.VideoController.java

License:Open Source License

/**
 * Handles GET /s3: connects to S3 with the configured credentials, fetches
 * the bucket policy, and renders the "video/s3" view.
 *
 * @param req  the current request (unused)
 * @param resp the current response (unused)
 * @return the logical view name "video/s3"
 */
@RequestMapping(value = "/s3", method = RequestMethod.GET)
public String s3(HttpServletRequest req, HttpServletResponse resp) {

    AWSCredentials credentials = new BasicAWSCredentials(awsAccessKey, awsSecretKey);
    AmazonS3 s3Client = new AmazonS3Client(credentials);

    // The returned policy was assigned to an unused local in the original;
    // the call is kept for its side effect of exercising credentials and
    // connectivity against S3. NOTE(review): confirm the call is really
    // needed — it adds a network round trip on every request and propagates
    // AmazonClientException to the caller on failure.
    s3Client.getBucketPolicy(bucket);

    return "video/s3";
}

From source file:org.deeplearning4j.aws.s3.BaseS3.java

License:Apache License

/**
 * Creates an S3 client from the credentials held by this instance.
 *
 * @return a freshly constructed client on every call (not cached)
 */
public AmazonS3 getClient() {
    AmazonS3 client = new AmazonS3Client(creds);
    return client;
}