Usage examples for the com.amazonaws.services.s3 AmazonS3 listObjects method
public ObjectListing listObjects(ListObjectsRequest listObjectsRequest) throws SdkClientException, AmazonServiceException;
Returns a list of summary information about the objects in the specified bucket.
From source file:org.alanwilliamson.amazon.s3.List.java
License:Open Source License
/**
 * CFML function: lists the contents of an S3 bucket (one "directory" level, since the
 * delimiter is fixed to "/") and returns the result as a query with columns
 * key / size / modified / etag. Common prefixes ("sub-directories") are emitted first
 * with size 0 and NULL modified/etag cells.
 *
 * Named arguments: "bucket" (required), "prefix" (optional, defaults to "").
 * On any AWS failure the error is surfaced via throwException.
 */
public cfData execute(cfSession _session, cfArgStructData argStruct) throws cfmRunTimeException {
    AmazonKey amazonKey = getAmazonKey(_session, argStruct);
    AmazonS3 s3Client = getAmazonS3(amazonKey);

    String bucket = getNamedStringParam(argStruct, "bucket", null);
    String prefix = getNamedStringParam(argStruct, "prefix", "");

    // throwException presumably aborts execution — TODO confirm; otherwise bucket==null would NPE below
    if (bucket == null)
        throwException(_session, "Please specify a bucket");

    try {
        // Create the results query: key / size / modified / etag
        cfQueryResultData qD = new cfQueryResultData(new String[] { "key", "size", "modified", "etag" },
                null);
        qD.setQuerySource("AmazonS3." + amazonKey.getDataSource());

        // Delimiter "/" makes the listing hierarchical: direct children only,
        // deeper keys are rolled up into common prefixes.
        ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(bucket)
                .withDelimiter("/").withPrefix(prefix);

        ObjectListing objectListing;
        do {
            objectListing = s3Client.listObjects(listObjectsRequest);

            java.util.List<String> prefixes = objectListing.getCommonPrefixes();

            // first add the prefixes ("directories"): size 0, no timestamp/etag
            for (String nextPrefix : prefixes) {
                qD.addRow(1);
                qD.setCurrentRow(qD.getSize()); // position on the row just appended
                qD.setCell(1, new cfStringData(nextPrefix));
                qD.setCell(2, new cfNumberData(0));
                qD.setCell(3, cfNullData.NULL);
                qD.setCell(4, cfNullData.NULL);
            }

            // then the objects themselves
            for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
                // don't include the prefix being listed
                if (objectSummary.getKey().equals(prefix)) {
                    continue;
                }
                qD.addRow(1);
                qD.setCurrentRow(qD.getSize());
                qD.setCell(1, new cfStringData(objectSummary.getKey()));
                qD.setCell(2, new cfNumberData(objectSummary.getSize()));
                qD.setCell(3, new cfDateData(objectSummary.getLastModified()));
                qD.setCell(4, new cfStringData(objectSummary.getETag()));
            }

            // Advance pagination; getNextMarker is only meaningful while the
            // listing is truncated, which is exactly the loop condition.
            listObjectsRequest.setMarker(objectListing.getNextMarker());
        } while (objectListing.isTruncated());

        return qD;
    } catch (Exception e) {
        throwException(_session, "AmazonS3: " + e.getMessage());
        return cfBooleanData.FALSE;
    }
}
From source file:org.apache.jackrabbit.aws.ext.Utils.java
License:Apache License
/** * Delete S3 bucket. This method first deletes all objects from bucket and * then delete empty bucket./*w w w . j a va 2 s.c o m*/ * * @param bucketName the bucket name. */ public static void deleteBucket(final String bucketName) throws IOException { Properties prop = readConfig(DEFAULT_CONFIG_FILE); AmazonS3 s3service = openService(prop); ObjectListing prevObjectListing = s3service.listObjects(bucketName); while (true) { for (S3ObjectSummary s3ObjSumm : prevObjectListing.getObjectSummaries()) { s3service.deleteObject(bucketName, s3ObjSumm.getKey()); } if (!prevObjectListing.isTruncated()) { break; } prevObjectListing = s3service.listNextBatchOfObjects(prevObjectListing); } s3service.deleteBucket(bucketName); }
From source file:org.boriken.s3fileuploader.S3SampleRefactored.java
License:Open Source License
/**
 * Prints the key and size of every object in the bucket that matches the given prefix.
 *
 * Fix: the original only printed the first page of results even though its own
 * comment warned that listings may be truncated. We now follow truncated listings
 * with {@link AmazonS3#listNextBatchOfObjects} until all objects have been printed.
 *
 * @param s3         the S3 client to use
 * @param bucketName the bucket to list
 * @param prefix     only keys starting with this prefix are listed
 */
public static void listFiles(AmazonS3 s3, String bucketName, String prefix) {
    System.out.println("Listing objects");
    ObjectListing objectListing = s3
            .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix(prefix));
    while (true) {
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            System.out.println(
                    " - " + objectSummary.getKey() + " " + "(size = " + objectSummary.getSize() + ")");
        }
        // Large buckets return results in pages; keep fetching until complete.
        if (!objectListing.isTruncated()) {
            break;
        }
        objectListing = s3.listNextBatchOfObjects(objectListing);
    }
    System.out.println();
}
From source file:org.cto.VVS3Box.S3Sample.java
License:Open Source License
public static void main(String[] args) throws IOException { /*/*from w ww . j av a2 s . c om*/ * This credentials provider implementation loads your AWS credentials * from a properties file at the root of your classpath. * * Important: Be sure to fill in your AWS access credentials in the * AwsCredentials.properties file before you try to run this * sample. * http://aws.amazon.com/security-credentials */ AmazonS3 s3 = new AmazonS3Client(new ClasspathPropertiesFileCredentialsProvider()); Region usWest2 = Region.getRegion(Regions.US_WEST_2); s3.setRegion(usWest2); String bucketName = "lior.test-" + UUID.randomUUID(); String key = "MyObjectKey"; System.out.println("==========================================="); System.out.println("Getting Started with Amazon S3"); System.out.println("===========================================\n"); try { /* * Create a new S3 bucket - Amazon S3 bucket names are globally unique, * so once a bucket name has been taken by any user, you can't create * another bucket with that same name. * * You can optionally specify a location for your bucket if you want to * keep your data closer to your applications or users. */ System.out.println("Creating bucket " + bucketName + "\n"); s3.createBucket(bucketName); /* * List the buckets in your account */ System.out.println("Listing buckets"); for (Bucket bucket : s3.listBuckets()) { System.out.println(" - " + bucket.getName()); } System.out.println(); /* * Upload an object to your bucket - You can easily upload a file to * S3, or upload directly an InputStream if you know the length of * the data in the stream. You can also specify your own metadata * when uploading to S3, which allows you set a variety of options * like content-type and content-encoding, plus additional metadata * specific to your applications. 
*/ System.out.println("Uploading a new object to S3 from a file\n"); s3.putObject(new PutObjectRequest(bucketName, key, createSampleFile())); /* * Download an object - When you download an object, you get all of * the object's metadata and a stream from which to read the contents. * It's important to read the contents of the stream as quickly as * possibly since the data is streamed directly from Amazon S3 and your * network connection will remain open until you read all the data or * close the input stream. * * GetObjectRequest also supports several other options, including * conditional downloading of objects based on modification times, * ETags, and selectively downloading a range of an object. */ System.out.println("Downloading an object"); S3Object object = s3.getObject(new GetObjectRequest(bucketName, key)); System.out.println("Content-Type: " + object.getObjectMetadata().getContentType()); displayTextInputStream(object.getObjectContent()); /* * List objects in your bucket by prefix - There are many options for * listing the objects in your bucket. Keep in mind that buckets with * many objects might truncate their results when listing their objects, * so be sure to check if the returned object listing is truncated, and * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve * additional results. */ System.out.println("Listing objects"); ObjectListing objectListing = s3 .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix("My")); for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) { System.out.println( " - " + objectSummary.getKey() + " " + "(size = " + objectSummary.getSize() + ")"); } System.out.println(); /* * Delete an object - Unless versioning has been turned on for your bucket, * there is no way to undelete an object, so use caution when deleting objects. 
*/ System.out.println("Deleting an object\n"); s3.deleteObject(bucketName, key); /* * Delete a bucket - A bucket must be completely empty before it can be * deleted, so remember to delete any objects from your buckets before * you try to delete them. */ System.out.println("Deleting bucket " + bucketName + "\n"); s3.deleteBucket(bucketName); } catch (AmazonServiceException ase) { System.out.println("Caught an AmazonServiceException, which means your request made it " + "to Amazon S3, but was rejected with an error response for some reason."); System.out.println("Error Message: " + ase.getMessage()); System.out.println("HTTP Status Code: " + ase.getStatusCode()); System.out.println("AWS Error Code: " + ase.getErrorCode()); System.out.println("Error Type: " + ase.getErrorType()); System.out.println("Request ID: " + ase.getRequestId()); } catch (AmazonClientException ace) { System.out.println("Caught an AmazonClientException, which means the client encountered " + "a serious internal problem while trying to communicate with S3, " + "such as not being able to access the network."); System.out.println("Error Message: " + ace.getMessage()); } }
From source file:org.deeplearning4j.aws.s3.reader.S3Downloader.java
License:Apache License
/**
 * Collects every object key in the given bucket, following truncated listings
 * until the full key set has been gathered.
 *
 * @param bucket the bucket to enumerate
 * @return all object keys in the bucket
 */
public List<String> keysForBucket(String bucket) {
    AmazonS3 s3 = getClient();
    List<String> keys = new ArrayList<>();
    ListObjectsRequest request = new ListObjectsRequest().withBucketName(bucket);
    while (true) {
        ObjectListing page = s3.listObjects(request);
        for (S3ObjectSummary summary : page.getObjectSummaries()) {
            keys.add(summary.getKey());
        }
        if (!page.isTruncated()) {
            break;
        }
        // Resume the listing where this page left off.
        request.setMarker(page.getNextMarker());
    }
    return keys;
}
From source file:org.deeplearning4j.aws.s3.reader.S3Downloader.java
License:Apache License
/**
 * Convenience accessor for a bucket's first page of object listings.
 *
 * @param bucket the bucket to retrieve listings for
 * @return the object listing for this bucket
 */
public ObjectListing listObjects(String bucket) {
    return getClient().listObjects(bucket);
}
From source file:org.deeplearning4j.aws.s3.reader.S3Downloader.java
License:Apache License
/**
 * Walks every key in a bucket, invoking the listener once per key and
 * transparently following truncated listings.
 *
 * @param bucket   the bucket to iterate
 * @param listener callback invoked for each key; may be null (keys are then
 *                 fetched but ignored, preserving the original behavior)
 */
public void paginate(String bucket, BucketKeyListener listener) {
    AmazonS3 s3 = getClient();
    ObjectListing page = s3.listObjects(bucket);
    while (true) {
        for (S3ObjectSummary summary : page.getObjectSummaries()) {
            if (listener != null) {
                listener.onKey(s3, bucket, summary.getKey());
            }
        }
        if (!page.isTruncated()) {
            break;
        }
        page = s3.listNextBatchOfObjects(page);
    }
}
From source file:org.finra.herd.dao.impl.S3OperationsImpl.java
License:Apache License
/**
 * Thin pass-through to {@link AmazonS3#listObjects(ListObjectsRequest)},
 * kept behind this interface so S3 calls can be mocked in tests.
 */
@Override
public ObjectListing listObjects(ListObjectsRequest listObjectsRequest, AmazonS3 s3Client) {
    final ObjectListing listing = s3Client.listObjects(listObjectsRequest);
    return listing;
}
From source file:org.nuxeo.liveconnect.importer.aws.S3Importer.java
License:Apache License
/**
 * Imports every object of the provider's S3 bucket as a live-connect file
 * document under the given root folder, committing the transaction every 10
 * documents to keep it bounded.
 */
public void importBucket(DocumentModel rootFolder) {
    LiveconnectS3Blobprovider blobprovider = (LiveconnectS3Blobprovider) Framework.getService(BlobManager.class)
            .getBlobProvider(provider);
    AmazonS3 s3 = blobprovider.getClient();
    String bucketName = blobprovider.getBucketName();
    // No delimiter/prefix: this walks every key in the bucket.
    final ListObjectsRequest req = new ListObjectsRequest().withBucketName(bucketName);
    ObjectListing result;
    int docsCount = 0;
    do {
        result = s3.listObjects(req);
        for (S3ObjectSummary objectSummary : result.getObjectSummaries()) {
            // getFilename presumably returns null for keys that are not importable
            // (e.g. "directory" placeholders) — TODO confirm against S3LiveConnectFile
            String name = S3LiveConnectFile.getFilename(objectSummary.getKey());
            if (name == null)
                continue;
            DocumentModel fileDoc = getOrCreateFileDocument(rootFolder, objectSummary.getKey(), name);
            // import object; the ETag ties the live-connect file to this object revision
            LiveConnectFileInfo info = new LiveConnectFileInfo(
                    rootFolder.getCoreSession().getPrincipal().getName(), objectSummary.getKey(),
                    objectSummary.getETag());
            LiveConnectBlobProvider blobProvider = (LiveConnectBlobProvider) Framework
                    .getService(BlobManager.class).getBlobProvider(provider);
            try {
                Blob blob = blobProvider.toBlob(info);
                fileDoc.setPropertyValue("file:content", (Serializable) blob);
                fileDoc.getCoreSession().saveDocument(fileDoc);
            } catch (IOException e) {
                // Best-effort import: a single failed blob does not abort the run.
                log.warn("Couldn't get Blob with ID " + info.getFileId(), e);
            }
            docsCount++;
            // Commit in batches of 10 so the transaction stays small.
            if (docsCount % 10 == 0) {
                rootFolder.getCoreSession().save();
                if (TransactionHelper.isTransactionActive()) {
                    TransactionHelper.commitOrRollbackTransaction();
                    TransactionHelper.startTransaction();
                }
            }
        }
        // Advance pagination; marker is only used while the listing is truncated.
        req.setMarker(result.getNextMarker());
    } while (result.isTruncated());
    // Flush the final partial batch.
    rootFolder.getCoreSession().save();
    if (TransactionHelper.isTransactionActive()) {
        TransactionHelper.commitOrRollbackTransaction();
        TransactionHelper.startTransaction();
    }
}
From source file:org.p365.S3Sample.java
License:Open Source License
public static void main(String[] args) throws IOException { /*/*from w ww. ja v a2 s . c o m*/ * This credentials provider implementation loads your AWS credentials * from a properties file at the root of your classpath. * * Important: Be sure to fill in your AWS access credentials in the * AwsCredentials.properties file before you try to run this * sample. * http://aws.amazon.com/security-credentials */ AmazonS3 s3 = new AmazonS3Client(new ClasspathPropertiesFileCredentialsProvider()); Region usWest2 = Region.getRegion(Regions.US_WEST_2); s3.setRegion(usWest2); String bucketName = "mynewbuket"; String key = "Myobj/sd.jpg"; System.out.println("==========================================="); System.out.println("Getting Started with Amazon S3"); System.out.println("===========================================\n"); try { /* * Create a new S3 bucket - Amazon S3 bucket names are globally unique, * so once a bucket name has been taken by any user, you can't create * another bucket with that same name. * * You can optionally specify a location for your bucket if you want to * keep your data closer to your applications or users. */ System.out.println("Creating bucket " + bucketName + "\n"); if (!s3.doesBucketExist(bucketName)) { s3.createBucket(bucketName); } /* * List the buckets in your account */ System.out.println("Listing buckets"); for (Bucket bucket : s3.listBuckets()) { System.out.println(" - " + bucket.getName()); } System.out.println(); /* * Upload an object to your bucket - You can easily upload a file to * S3, or upload directly an InputStream if you know the length of * the data in the stream. You can also specify your own metadata * when uploading to S3, which allows you set a variety of options * like content-type and content-encoding, plus additional metadata * specific to your applications. 
*/ System.out.println("Uploading a new object to S3 from a file\n"); String pathname = "D:\\Program Files\\apache-tomcat-7.0.42\\webapps\\WorkerForP365\\src\\AAA_1465.jpg"; File file = new File(pathname); s3.putObject( new PutObjectRequest(bucketName, key, file).withCannedAcl(CannedAccessControlList.PublicRead)); /* * Download an object - When you download an object, you get all of * the object's metadata and a stream from which to read the contents. * It's important to read the contents of the stream as quickly as * possibly since the data is streamed directly from Amazon S3 and your * network connection will remain open until you read all the data or * close the input stream. * * GetObjectRequest also supports several other options, including * conditional downloading of objects based on modification times, * ETags, and selectively downloading a range of an object. */ System.out.println("Downloading an object"); S3Object object = s3.getObject(new GetObjectRequest(bucketName, key)); System.out.println("Content-Type: " + object.getObjectMetadata().getContentType()); displayTextInputStream(object.getObjectContent()); /* * List objects in your bucket by prefix - There are many options for * listing the objects in your bucket. Keep in mind that buckets with * many objects might truncate their results when listing their objects, * so be sure to check if the returned object listing is truncated, and * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve * additional results. 
*/ System.out.println("Listing objects"); ObjectListing objectListing = s3 .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix("My")); for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) { System.out.println( " - " + objectSummary.getKey() + " " + "(size = " + objectSummary.getSize() + ")"); } System.out.println(); /* * Delete an object - Unless versioning has been turned on for your bucket, * there is no way to undelete an object, so use caution when deleting objects. */ //System.out.println("Deleting an object\n"); //s3.deleteObject(bucketName, key); /* * Delete a bucket - A bucket must be completely empty before it can be * deleted, so remember to delete any objects from your buckets before * you try to delete them. */ //System.out.println("Deleting bucket " + bucketName + "\n"); //s3.deleteBucket(bucketName); } catch (AmazonServiceException ase) { System.out.println("Caught an AmazonServiceException, which means your request made it " + "to Amazon S3, but was rejected with an error response for some reason."); System.out.println("Error Message: " + ase.getMessage()); System.out.println("HTTP Status Code: " + ase.getStatusCode()); System.out.println("AWS Error Code: " + ase.getErrorCode()); System.out.println("Error Type: " + ase.getErrorType()); System.out.println("Request ID: " + ase.getRequestId()); } catch (AmazonClientException ace) { System.out.println("Caught an AmazonClientException, which means the client encountered " + "a serious internal problem while trying to communicate with S3, " + "such as not being able to access the network."); System.out.println("Error Message: " + ace.getMessage()); } }