List of usage examples for com.amazonaws.services.s3.model.S3ObjectSummary.getKey()
public String getKey()
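A minimal sketch of the common pattern, shown before the collected examples; the client `s3` and the bucket name are illustrative assumptions, not taken from the sources below:

// Minimal sketch: list a bucket and read each summary's key.
// "s3" is an already-initialized AmazonS3 client; "my-bucket" is an assumed name.
ObjectListing listing = s3.listObjects(new ListObjectsRequest().withBucketName("my-bucket"));
for (S3ObjectSummary summary : listing.getObjectSummaries()) {
    System.out.println(summary.getKey() + " (" + summary.getSize() + " bytes)");
}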
From source file:com.cloud.utils.S3Utils.java
License:Apache License
public static void deleteDirectory(final ClientOptions clientOptions, final String bucketName,
        final String directoryName) {
    assert clientOptions != null;
    assert isNotBlank(bucketName);
    assert isNotBlank(directoryName);

    final AmazonS3 client = acquireClient(clientOptions);
    final List<S3ObjectSummary> objects = listDirectory(bucketName, directoryName, client);

    for (final S3ObjectSummary object : objects) {
        client.deleteObject(bucketName, object.getKey());
    }

    client.deleteObject(bucketName, directoryName);
}
From source file:com.clouddrive.parth.AmazonOperations.java
public List<String> getFiles(String userName) {
    List<String> list = null;
    if (isBucketPresent(userName)) {
        ObjectListing bucketList = s3.listObjects(new ListObjectsRequest().withBucketName(userName));
        // Guard the iteration as well: the original looped outside this null
        // check and would throw a NullPointerException on a null listing.
        if (bucketList != null) {
            list = new ArrayList<String>();
            for (S3ObjectSummary objectSummary : bucketList.getObjectSummaries()) {
                list.add(objectSummary.getKey());
            }
        }
    }
    return list;
}
From source file:com.cloudhub.aws.extractor.AWSCSVExtractor.java
License:Apache License
/**
 * Requests billing information from Amazon S3.
 * This method may spawn multiple threads as needed to complete the task.
 */
@Override
public String getTotalCost() {
    String totalCost = null;
    try {
        log.debug("Listing objects ...");
        final ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(bucketName);
        ObjectListing objectListing;
        do {
            objectListing = s3client.listObjects(listObjectsRequest);
            for (final S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
                log.debug(" - " + objectSummary.getKey() + " (size = " + objectSummary.getSize() + ")");
                if (objectSummary.getKey().contains(Constants.MATCHER_BILLING_CSV.getKeyPattern())) {
                    totalCost = persist(Constants.MATCHER_BILLING_CSV, objectSummary);
                } else if (objectSummary.getKey().contains(Constants.MATCHER_COST_ALLOCATION.getKeyPattern())) {
                    totalCost = persist(Constants.MATCHER_COST_ALLOCATION, objectSummary);
                }
            }
            listObjectsRequest.setMarker(objectListing.getNextMarker());
        } while (objectListing.isTruncated());
    } catch (AmazonServiceException ase) {
        log.error("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        log.error("Error Message: " + ase.getMessage());
        log.error("HTTP Status Code: " + ase.getStatusCode());
        log.error("AWS Error Code: " + ase.getErrorCode());
        log.error("Error Type: " + ase.getErrorType());
        log.error("Request ID: " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        log.error("Caught an AmazonClientException, which means the client encountered "
                + "an internal error while trying to communicate with S3, "
                + "such as not being able to access the network.");
        log.error("Error Message: " + ace.getMessage());
    } catch (IOException ioe) {
        log.error("Caught an IOException while writing to disk.");
        log.error("Error Message: " + ioe.getMessage());
    }
    return totalCost;
}
From source file:com.cloudhub.aws.extractor.AWSCSVExtractor.java
License:Apache License
/**
 * Persists the Amazon S3 object to disk.
 *
 * @param matcher - the CSV matcher used to extract the total from the downloaded file.
 * @param objectSummary - the S3 object to be persisted to disk.
 * @throws IOException - if an I/O error occurs.
 */
private String persist(final CSVMatcher matcher, final S3ObjectSummary objectSummary) throws IOException {
    log.debug("Downloading the body of " + objectSummary.getKey() + " from Amazon S3.");
    final S3Object object = s3client.getObject(bucketName, objectSummary.getKey());
    log.debug("Downloaded " + objectSummary.getSize() + " bytes.");

    log.debug("Writing the body of " + objectSummary.getKey() + " to disk path: "
            + dataFolder + File.separator + bucketName);
    final File objectCSVFile = writeOutObjectToFile(objectSummary, object.getObjectContent());

    return getTotal(matcher, objectCSVFile);
}
From source file:com.cloudhub.aws.extractor.AWSCSVExtractor.java
License:Apache License
/**
 * Writes the specified Amazon S3 object to the file system.
 *
 * @param object - the object to write out
 * @param inputStream - the body of the object
 * @throws IOException - if any I/O error occurs.
 */
private File writeOutObjectToFile(final S3ObjectSummary object, final InputStream inputStream)
        throws IOException {
    final File parent = new File(dataFolder, object.getBucketName());
    if (!parent.exists()) {
        parent.mkdirs();
    }

    final File objectFile = new File(parent, object.getKey());
    // try-with-resources closes both channels even if the transfer fails,
    // fixing the resource leak the original suppressed with @SuppressWarnings("resource").
    try (ReadableByteChannel src = Channels.newChannel(inputStream);
            FileChannel dest = new FileOutputStream(objectFile).getChannel()) {
        dest.transferFrom(src, 0, object.getSize());
    }
    return objectFile;
}
From source file:com.conductor.s3.S3InputFormatUtils.java
License:Apache License
/**
 * Efficiently gets the Hadoop {@link org.apache.hadoop.fs.FileStatus} for all S3 files under the provided
 * {@code dirs}.
 *
 * @param s3Client s3 client
 * @param blockSize the block size
 * @param dirs the dirs to search through
 * @return the {@link org.apache.hadoop.fs.FileStatus} version of all S3 files under {@code dirs}
 */
static List<FileStatus> getFileStatuses(final AmazonS3 s3Client, final long blockSize, final Path... dirs) {
    final List<FileStatus> result = Lists.newArrayList();
    for (final Path dir : dirs) {
        // get bucket and prefix from path
        final String bucket = S3HadoopUtils.getBucketFromPath(dir.toString());
        final String prefix = S3HadoopUtils.getKeyFromPath(dir.toString());
        // list request
        final ListObjectsRequest req = new ListObjectsRequest().withMaxKeys(Integer.MAX_VALUE)
                .withBucketName(bucket).withPrefix(prefix);
        // page through all objects under the path
        for (ObjectListing listing = s3Client.listObjects(req); listing.getObjectSummaries().size() > 0;
                listing = s3Client.listNextBatchOfObjects(listing)) {
            for (final S3ObjectSummary summary : listing.getObjectSummaries()) {
                final Path path = new Path(
                        String.format("s3n://%s/%s", summary.getBucketName(), summary.getKey()));
                if (S3_PATH_FILTER.accept(path)) {
                    result.add(new FileStatus(summary.getSize(), false, 1, blockSize,
                            summary.getLastModified().getTime(), path));
                }
            }
            // don't need to fetch the next listing if this one is not truncated
            if (!listing.isTruncated()) {
                break;
            }
        }
    }
    return result;
}
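A hypothetical call site for the utility above; the client variable, block size, and paths are assumptions for illustration, not part of the original source:

// Sketch: gather FileStatus entries for two assumed S3 directories.
List<FileStatus> statuses = S3InputFormatUtils.getFileStatuses(
        s3Client,            // an initialized AmazonS3 client
        134217728L,          // assumed 128 MB block size
        new Path("s3n://my-bucket/logs/2015/"),
        new Path("s3n://my-bucket/logs/2016/"));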
From source file:com.crickdata.upload.s3.UploadLiveData.java
License:Open Source License
public Map<String, Date> uploadToS3(String fileName, boolean type) throws IOException {
    Statistics statistics = new Statistics();
    Map<String, Date> perfMap = new HashMap<String, Date>();
    AWSCredentials credentials = null;
    try {
        // Access and secret keys redacted here; never hard-code credentials in source.
        credentials = new BasicAWSCredentials("<access-key-id>", "<secret-access-key>");
    } catch (Exception e) {
        throw new AmazonClientException("Cannot load the credentials from the credential profiles file. "
                + "Please make sure that your credentials file is at the correct "
                + "location (C:\\Users\\bssan_000\\.aws\\credentials), and is in valid format.", e);
    }
    AmazonS3 s3 = new AmazonS3Client(credentials);
    Region usWest2 = Region.getRegion(Regions.US_WEST_2);
    s3.setRegion(usWest2);
    String bucketName;
    if (!type)
        bucketName = "cricmatchinfo";
    else
        bucketName = "cricmatchinfoseries";
    String key = fileName.replace(".json", "").trim();
    try {
        perfMap.put("S3INSERTREQ", new Date());
        statistics.setS3Req(new Date());
        File f = readMatchFile(fileName);
        double bytes = f.length();
        double kilobytes = (bytes / 1024);
        System.out.println("Details :" + kilobytes);
        s3.putObject(new PutObjectRequest(bucketName, key, f));
        statistics.setSize(String.valueOf(kilobytes));
        S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
        perfMap.put("S3SAVERES", object.getObjectMetadata().getLastModified());
        statistics.setKey(key);
        statistics.setS3Res(object.getObjectMetadata().getLastModified());
        MyUI.stats.add(statistics);
        displayTextInputStream(object.getObjectContent());
        ObjectListing objectListing = s3
                .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix("My"));
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            System.out.println(
                    " - " + objectSummary.getKey() + " (size = " + objectSummary.getSize() + ")");
        }
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message: " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code: " + ase.getErrorCode());
        System.out.println("Error Type: " + ase.getErrorType());
        System.out.println("Request ID: " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
    return perfMap;
}
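Hard-coding keys, as the original source did (redacted above), is unsafe. A minimal sketch of the safer pattern, assuming the AWS SDK for Java v1 is on the classpath:

// Sketch: let the SDK resolve credentials from environment variables,
// system properties, ~/.aws/credentials, or an instance profile.
AmazonS3 s3 = new AmazonS3Client(new DefaultAWSCredentialsProviderChain());
s3.setRegion(Region.getRegion(Regions.US_WEST_2));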
From source file:com.davidsoergel.s3napback.S3ops.java
License:Apache License
public static void delete(TransferManager tx, String bucket, String fileprefix) throws InterruptedException {
    logger.info("Deleting " + fileprefix);
    List<DeleteObjectsRequest.KeyVersion> keys = new ArrayList<DeleteObjectsRequest.KeyVersion>();
    ObjectListing objectListing = tx.getAmazonS3Client()
            .listObjects(new ListObjectsRequest().withBucketName(bucket).withPrefix(fileprefix));
    // Note: only the first page of results is collected; a truncated listing
    // would need paging with listNextBatchOfObjects before deleting.
    for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
        keys.add(new DeleteObjectsRequest.KeyVersion(objectSummary.getKey()));
    }
    DeleteObjectsRequest req = new DeleteObjectsRequest(bucket);
    req.setKeys(keys);
    DeleteObjectsResult result = tx.getAmazonS3Client().deleteObjects(req);
}
From source file:com.davidsoergel.s3napback.S3ops.java
License:Apache License
public static void list(StreamingTransferManager tx, String bucket) throws InterruptedException {
    //** sort by date
    SortedMap<String, SortedMap<String, S3ObjectSummary>> blocks =
            new TreeMap<String, SortedMap<String, S3ObjectSummary>>();

    // Page through the bucket, accumulating every object summary exactly once.
    ObjectListing current = tx.getAmazonS3Client()
            .listObjects(new ListObjectsRequest().withBucketName(bucket));
    List<S3ObjectSummary> keyList = new ArrayList<S3ObjectSummary>(current.getObjectSummaries());
    while (current.isTruncated()) {
        current = tx.getAmazonS3Client().listNextBatchOfObjects(current);
        keyList.addAll(current.getObjectSummaries());
    }

    for (S3ObjectSummary objectSummary : keyList) {
        String[] c = objectSummary.getKey().split(":");
        if (c.length != 2) {
            logger.warn("ignoring malformed filename " + objectSummary.getKey());
        } else {
            String filename = c[0];
            String chunknum = c[1];
            SortedMap<String, S3ObjectSummary> chunks = blocks.get(filename);
            if (chunks == null) {
                chunks = new TreeMap<String, S3ObjectSummary>();
                blocks.put(filename, chunks);
            }
            chunks.put(chunknum, objectSummary);
        }
    }

    // now the files and chunks are in the maps in order
    for (Map.Entry<String, SortedMap<String, S3ObjectSummary>> blockEntry : blocks.entrySet()) {
        String filename = blockEntry.getKey();
        SortedMap<String, S3ObjectSummary> chunks = blockEntry.getValue();
        long totalsize = 0;
        Date lastModified = null;
        for (Map.Entry<String, S3ObjectSummary> entry : chunks.entrySet()) {
            totalsize += entry.getValue().getSize();
            lastModified = entry.getValue().getLastModified();
        }
        String[] line = { bucket, filename, "" + chunks.keySet().size(), "" + totalsize,
                lastModified.toString() };
        System.err.println(StringUtils.join("\t", line));
        // 2008-04-10 04:07:50 - dev.davidsoergel.com.backup1:MySQL/all-0 - 153.38k in 1 data blocks
    }
}
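The split on ":" assumes keys follow a filename:chunknum convention; illustratively (these names are not from the source), a file stored in three chunks would appear under keys such as MySQL/all-0:0, MySQL/all-0:1, and MySQL/all-0:2, and would be reported as one file with three data blocks.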
From source file:com.davidsoergel.s3napback.S3ops.java
License:Apache License
public static void download(StreamingTransferManager tx, String bucket, String fileprefix)
        throws InterruptedException, IOException {
    // first list the files
    SortedMap<String, S3ObjectSummary> chunks = new TreeMap<String, S3ObjectSummary>();
    ObjectListing objectListing = tx.getAmazonS3Client()
            .listObjects(new ListObjectsRequest().withBucketName(bucket).withPrefix(fileprefix));
    for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
        chunks.put(objectSummary.getKey(), objectSummary);
    }

    logger.info("Downloading " + fileprefix);
    Date start = new Date();

    // now the chunks are in the map in order
    long totalBytes = 0;
    BufferedOutputStream out = new BufferedOutputStream(System.out);
    for (Map.Entry<String, S3ObjectSummary> entry : chunks.entrySet()) {
        String key = entry.getKey();
        logger.info("Downloading " + key);
        Download myDownload = tx.download(bucket, key, out);
        while (!myDownload.isDone()) {
            long bytes = totalBytes + myDownload.getProgress().getBytesTransfered();
            Double mb = (double) bytes / 1024. / 1024.;
            Double sec = (new Date().getTime() - start.getTime()) / 1000.;
            Double rate = mb / sec;
            logger.info(String.format("%.2f MB, %.2fMB/s", mb, rate));
            // Do work while we wait for our download to complete...
            Thread.sleep(500);
        }
        totalBytes += myDownload.getProgress().getBytesTransfered();
    }
    out.close();

    Long bytes = totalBytes;
    Double mb = (double) bytes / 1024. / 1024.;
    Double sec = (new Date().getTime() - start.getTime()) / 1000.;
    Double rate = mb / sec;
    logger.info(String.format("Downloaded %s to stdout, %d bytes, %.2f sec, %.2fMB/s", fileprefix,
            totalBytes, sec, rate));
    //logger.info("Downloaded " + fileprefix + " to stdout, " + totalBytes + " bytes, " + sec + " sec, " + rate + " MB/sec");
}