Usage examples for com.amazonaws.auth.ClasspathPropertiesFileCredentialsProvider
public ClasspathPropertiesFileCredentialsProvider()
Creates a credentials provider that loads the AwsCredentials.properties file from the classpath to read AWS security credentials.
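Before running any of the examples below, an AwsCredentials.properties file must sit at the root of the classpath. The following is a minimal sketch (not taken from the source files listed here) of the expected file layout and of constructing a client with this provider; the property names accessKey and secretKey follow the SDK's properties-file format, and the region and output are placeholders for illustration.

# src/main/resources/AwsCredentials.properties (ends up at the classpath root)
accessKey = YOUR_ACCESS_KEY_ID
secretKey = YOUR_SECRET_ACCESS_KEY

import com.amazonaws.auth.ClasspathPropertiesFileCredentialsProvider;
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.Bucket;

public class QuickStart {
    public static void main(String[] args) {
        // The provider reads AwsCredentials.properties from the classpath root.
        AmazonS3 s3 = new AmazonS3Client(new ClasspathPropertiesFileCredentialsProvider());
        s3.setRegion(Region.getRegion(Regions.US_EAST_1)); // placeholder region

        for (Bucket bucket : s3.listBuckets()) {
            System.out.println(" - " + bucket.getName());
        }
    }
}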
From source file: com.uiintl.backup.agent.AwsBackupAgent.java
License: Open Source License
void initS3Client() {
    ClientConfiguration configuration = new ClientConfiguration();
    configuration.setSocketTimeout(EXTENDED_SO_TIMEOUT);

    s3 = new AmazonS3Client(new ClasspathPropertiesFileCredentialsProvider(), configuration);

    Region apSoutheast2 = Region.getRegion(Regions.AP_SOUTHEAST_2);
    s3.setRegion(apSoutheast2);
}
From source file: com.uiintl.backup.agent.samples.S3Sample.java
License: Open Source License
public static void main2(String[] args) throws IOException {
    /*
     * This credentials provider implementation loads your AWS credentials
     * from a properties file at the root of your classpath.
     *
     * Important: Be sure to fill in your AWS access credentials in the
     * AwsCredentials.properties file before you try to run this sample.
     * http://aws.amazon.com/security-credentials
     */
    AmazonS3 s3 = new AmazonS3Client(new ClasspathPropertiesFileCredentialsProvider());
    Region apSoutheast2 = Region.getRegion(Regions.AP_SOUTHEAST_2);
    s3.setRegion(apSoutheast2);

    String bucketName = "my-first-s3-bucket-" + UUID.randomUUID();
    String key = "MyObjectKey";

    System.out.println("===========================================");
    System.out.println("Getting Started with Amazon S3");
    System.out.println("===========================================\n");

    try {
        /*
         * Create a new S3 bucket - Amazon S3 bucket names are globally unique,
         * so once a bucket name has been taken by any user, you can't create
         * another bucket with that same name.
         *
         * You can optionally specify a location for your bucket if you want to
         * keep your data closer to your applications or users.
         */
        System.out.println("Creating bucket " + bucketName + "\n");
        s3.createBucket(bucketName);

        /*
         * List the buckets in your account.
         */
        System.out.println("Listing buckets");
        for (Bucket bucket : s3.listBuckets()) {
            System.out.println(" - " + bucket.getName());
        }
        System.out.println();

        /*
         * Upload an object to your bucket - You can easily upload a file to
         * S3, or upload an InputStream directly if you know the length of
         * the data in the stream. You can also specify your own metadata
         * when uploading to S3, which allows you to set a variety of options
         * like content-type and content-encoding, plus additional metadata
         * specific to your applications.
         */
        System.out.println("Uploading a new object to S3 from a file\n");
        s3.putObject(new PutObjectRequest(bucketName, key, createSampleFile()));

        /*
         * Download an object - When you download an object, you get all of
         * the object's metadata and a stream from which to read the contents.
         * It's important to read the contents of the stream as quickly as
         * possible, since the data is streamed directly from Amazon S3 and your
         * network connection will remain open until you read all the data or
         * close the input stream.
         *
         * GetObjectRequest also supports several other options, including
         * conditional downloading of objects based on modification times,
         * ETags, and selectively downloading a range of an object.
         */
        System.out.println("Downloading an object");
        S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
        System.out.println("Content-Type: " + object.getObjectMetadata().getContentType());
        displayTextInputStream(object.getObjectContent());

        /*
         * List objects in your bucket by prefix - There are many options for
         * listing the objects in your bucket. Keep in mind that buckets with
         * many objects might truncate their results when listing their objects,
         * so be sure to check if the returned object listing is truncated, and
         * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve
         * additional results.
         */
        System.out.println("Listing objects");
        ObjectListing objectListing = s3
                .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix("My"));
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            System.out.println(" - " + objectSummary.getKey() + " (size = " + objectSummary.getSize() + ")");
        }
        System.out.println();

        /*
         * Delete an object - Unless versioning has been turned on for your bucket,
         * there is no way to undelete an object, so use caution when deleting objects.
         */
        System.out.println("Deleting an object\n");
        s3.deleteObject(bucketName, key);

        /*
         * Delete a bucket - A bucket must be completely empty before it can be
         * deleted, so remember to delete any objects from your buckets before
         * you try to delete them.
         */
        System.out.println("Deleting bucket " + bucketName + "\n");
        s3.deleteBucket(bucketName);
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}
From source file: cs.iit.edu.ckmr.worker.TableCreator.java
License: Open Source License
public TableCreator() {
    dynamoDB = new AmazonDynamoDBClient(new ClasspathPropertiesFileCredentialsProvider());
    Region usEast1 = Region.getRegion(Regions.US_EAST_1);
    dynamoDB.setRegion(usEast1);
}
From source file: exemplos.S3Sample.java
License: Open Source License
public static void main(String[] args) throws IOException {
    /*
     * This credentials provider implementation loads your AWS credentials
     * from a properties file at the root of your classpath.
     *
     * Important: Be sure to fill in your AWS access credentials in the
     * AwsCredentials.properties file before you try to run this sample.
     * http://aws.amazon.com/security-credentials
     */
    AmazonS3 s3 = new AmazonS3Client(new ClasspathPropertiesFileCredentialsProvider());
    Region usWest2 = Region.getRegion(Regions.US_WEST_2);
    s3.setRegion(usWest2);

    String bucketName = "my-first-s3-bucket-" + UUID.randomUUID();
    String key = "MyObjectKey";

    System.out.println("===========================================");
    System.out.println("Getting Started with Amazon S3");
    System.out.println("===========================================\n");

    try {
        /*
         * Create a new S3 bucket - Amazon S3 bucket names are globally unique,
         * so once a bucket name has been taken by any user, you can't create
         * another bucket with that same name.
         *
         * You can optionally specify a location for your bucket if you want to
         * keep your data closer to your applications or users.
         */
        System.out.println("Creating bucket " + bucketName + "\n");
        s3.createBucket(bucketName);

        /*
         * List the buckets in your account.
         */
        System.out.println("Listing buckets");
        for (Bucket bucket : s3.listBuckets()) {
            System.out.println(" - " + bucket.getName());
        }
        System.out.println();

        /*
         * Upload an object to your bucket - You can easily upload a file to
         * S3, or upload an InputStream directly if you know the length of
         * the data in the stream. You can also specify your own metadata
         * when uploading to S3, which allows you to set a variety of options
         * like content-type and content-encoding, plus additional metadata
         * specific to your applications.
         */
        System.out.println("Uploading a new object to S3 from a file\n");
        s3.putObject(new PutObjectRequest(bucketName, key, createSampleFile()));

        /*
         * Download an object - When you download an object, you get all of
         * the object's metadata and a stream from which to read the contents.
         * It's important to read the contents of the stream as quickly as
         * possible, since the data is streamed directly from Amazon S3 and your
         * network connection will remain open until you read all the data or
         * close the input stream.
         *
         * GetObjectRequest also supports several other options, including
         * conditional downloading of objects based on modification times,
         * ETags, and selectively downloading a range of an object.
         */
        System.out.println("Downloading an object");
        S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
        System.out.println("Content-Type: " + object.getObjectMetadata().getContentType());
        displayTextInputStream(object.getObjectContent());

        /*
         * List objects in your bucket by prefix - There are many options for
         * listing the objects in your bucket. Keep in mind that buckets with
         * many objects might truncate their results when listing their objects,
         * so be sure to check if the returned object listing is truncated, and
         * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve
         * additional results.
         */
        System.out.println("Listing objects");
        ObjectListing objectListing = s3
                .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix("My"));
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            System.out.println(" - " + objectSummary.getKey() + " (size = " + objectSummary.getSize() + ")");
        }
        System.out.println();

        /*
         * Delete an object - Unless versioning has been turned on for your bucket,
         * there is no way to undelete an object, so use caution when deleting objects.
         */
        System.out.println("Deleting an object\n");
        s3.deleteObject(bucketName, key);

        /*
         * Delete a bucket - A bucket must be completely empty before it can be
         * deleted, so remember to delete any objects from your buckets before
         * you try to delete them.
         */
        System.out.println("Deleting bucket " + bucketName + "\n");
        s3.deleteBucket(bucketName);
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}
From source file: io.milton.s3.AmazonS3ManagerImpl.java
License: Open Source License
/**
 * You can choose the geographical region where Amazon S3 will store the
 * buckets you create. You might choose a region to optimize latency,
 * minimize costs, or address regulatory requirements.
 *
 * @param region
 */
public AmazonS3ManagerImpl(Region region) {
    LOG.info("Create an instance of the AmazonS3Client class by providing your "
            + "AWS Account or IAM user credentials (Access Key ID, Secret Access Key)");

    // Create an instance of the AmazonS3Client class by providing your AWS
    // Account or IAM user credentials (Access Key ID, Secret Access Key)
    amazonS3Client = new AmazonS3Client(new ClasspathPropertiesFileCredentialsProvider());
    amazonS3Client.setRegion(region);
}
From source file: io.milton.s3.db.DynamoDBServiceImpl.java
License: Open Source License
/**
 * The only information needed to create a client is security credentials
 * consisting of the AWS Access Key ID and Secret Access Key. All other
 * configuration, such as the service endpoints, is performed
 * automatically. Client parameters, such as proxies, can be specified in an
 * optional ClientConfiguration object when constructing a client.
 *
 * @see com.amazonaws.auth.ClasspathPropertiesFileCredentialsProvider
 * @see com.amazonaws.regions.Region
 */
public DynamoDBServiceImpl(Region region) {
    LOG.info("Initialize Amazon DynamoDB environment...!!!");
    dynamoDBClient = new AmazonDynamoDBClient(new ClasspathPropertiesFileCredentialsProvider());
    dynamoDBClient.setRegion(region);
}
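As the Javadoc above notes, client parameters such as proxies go through an optional ClientConfiguration passed alongside the credentials provider. The following is a minimal sketch of that variant (not from the source file above); the proxy host, port, and region are hypothetical placeholders, while the credentials still come from AwsCredentials.properties on the classpath.

import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.ClasspathPropertiesFileCredentialsProvider;
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClient;

public class DynamoClientWithProxy {
    public static void main(String[] args) {
        // Optional ClientConfiguration carrying client parameters such as a proxy.
        ClientConfiguration config = new ClientConfiguration();
        config.setProxyHost("proxy.example.com"); // hypothetical proxy host
        config.setProxyPort(8080);                // hypothetical proxy port

        // Credentials are read from AwsCredentials.properties on the classpath.
        AmazonDynamoDBClient client = new AmazonDynamoDBClient(
                new ClasspathPropertiesFileCredentialsProvider(), config);
        client.setRegion(Region.getRegion(Regions.US_EAST_1)); // placeholder region
    }
}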
From source file: net.geoprism.data.aws.AmazonEndpoint.java
License: Open Source License
@Override
public void copyFiles(File directory, List<String> keys, boolean preserveDirectories) {
    try {
        List<File> files = new LinkedList<File>();
        AmazonS3 client = new AmazonS3Client(new ClasspathPropertiesFileCredentialsProvider());

        for (String key : keys) {
            GetObjectRequest request = new GetObjectRequest("geodashboarddata", key);
            S3Object object = client.getObject(request);
            InputStream istream = object.getObjectContent();

            try {
                String targetPath = this.getTargetPath(preserveDirectories, key);
                File file = new File(directory, targetPath);

                FileUtils.copyInputStreamToFile(istream, file);
                files.add(file);
            } finally {
                // Close the object content stream.
                istream.close();
            }
        }
    } catch (IOException e) {
        throw new ProgrammingErrorException(e);
    }
}
From source file: net.geoprism.data.aws.AmazonEndpoint.java
License: Open Source License
private List<String> listFiles(String prefix) {
    List<String> files = new LinkedList<String>();

    try {
        AmazonS3 s3Client = new AmazonS3Client(new ClasspathPropertiesFileCredentialsProvider());

        ListObjectsRequest request = new ListObjectsRequest();
        request = request.withBucketName("geodashboarddata");
        request = request.withPrefix(prefix);

        ObjectListing listing;

        do {
            listing = s3Client.listObjects(request);

            List<S3ObjectSummary> summaries = listing.getObjectSummaries();

            for (S3ObjectSummary summary : summaries) {
                String key = summary.getKey();

                if (key.endsWith(".xml.gz")) {
                    files.add(key);
                }
            }

            request.setMarker(listing.getNextMarker());
        } while (listing != null && listing.isTruncated());
    } catch (Exception e) {
        logger.error("Unable to retrieve files", e);
    }

    return files;
}
From source file: net.oletalk.hellospringboot.dao.S3Dao.java
public void uploadFile(String bucketName, String key, File file) throws S3Exception {
    AmazonS3 s3client = new AmazonS3Client(new ClasspathPropertiesFileCredentialsProvider());
    try {
        LOG.info("Uploading file to S3");
        s3client.putObject(new PutObjectRequest(bucketName, key, file));
    } catch (AmazonServiceException ase) {
        LOG.error("Problem uploading file to S3: " + ase.getMessage()
                + " (status code " + ase.getStatusCode() + ")");
        throw new S3Exception("Problem uploading file to S3");
    } catch (AmazonClientException ace) {
        LOG.error("Internal error uploading file to S3: " + ace.getMessage());
        throw new S3Exception("Problem uploading file to S3");
    }
    LOG.info("Upload complete");
}
From source file: net.oletalk.hellospringboot.dao.S3Dao.java
public void getObjectData(String bucketName, String key, OutputStream out) throws S3Exception {
    AmazonS3 s3client = new AmazonS3Client(new ClasspathPropertiesFileCredentialsProvider());
    LOG.info("fetching requested object " + key);
    try {
        S3Object object = s3client.getObject(new GetObjectRequest(bucketName, key));
        LOG.info("fetched, serving up");
        try {
            IOUtils.copy(object.getObjectContent(), out);
        } catch (IOException ioe) {
            LOG.error("Problem writing message: " + ioe.getMessage());
        }
    } catch (AmazonS3Exception e) {
        LOG.error("Problem fetching from S3: ", e);
        String errorMsg = "Error fetching document";
        try {
            out.write(errorMsg.getBytes(Charset.defaultCharset()));
            throw new S3Exception("Error fetching document");
        } catch (IOException ioe) {
            LOG.error("Problem writing message: " + ioe.getMessage());
        }
    }
}