Usage examples for `com.amazonaws.services.s3.model.S3Object#getObjectContent()`
public S3ObjectInputStream getObjectContent()
From source file:com.github.rholder.esthree.command.Get.java
License:Apache License
/**
 * Downloads the entire object {@code bucket}/{@code key} into {@code output},
 * retrying the whole attempt via {@link RetryUtils#AWS_RETRYER}, and returns the
 * {@link MessageDigest} accumulated by {@code copyAndHash} while copying.
 *
 * Side effects on each attempt: {@code contentLength} and {@code fullETag} are
 * refreshed from the object's metadata, and {@code output} is lazily opened.
 *
 * @return the digest of the bytes written to {@code output}
 * @throws ExecutionException if the wrapped call fails non-retryably
 * @throws RetryException if all retry attempts are exhausted
 */
public MessageDigest retryingGet() throws ExecutionException, RetryException {
    return (MessageDigest) RetryUtils.AWS_RETRYER.call(new Callable<Object>() {
        public MessageDigest call() throws Exception {
            GetObjectRequest req = new GetObjectRequest(bucket, key);
            S3Object s3Object = amazonS3Client.getObject(req);
            // record metadata before streaming so progress/verification can use it
            contentLength = s3Object.getObjectMetadata().getContentLength();
            fullETag = s3Object.getObjectMetadata().getETag();

            Progress progress = new TransferProgressWrapper(new TransferProgress());
            progress.setTotalBytesToTransfer(contentLength);
            if (progressListener != null) {
                // whole-object download: starts at 0% and covers 100% of the transfer
                progressListener.withTransferProgress(progress).withCompleted(0.0).withMultiplier(1.0);
            }
            InputStream input = null;
            try {
                // create the output file, now that we know it actually exists
                if (output == null) {
                    output = new RandomAccessFile(outputFile, "rw");
                }
                // seek to the start of the chunk in the file, just in case we're retrying
                output.seek(0);
                input = s3Object.getObjectContent();
                return copyAndHash(input, contentLength, progress);
            } finally {
                IOUtils.closeQuietly(input);
            }
        }
    });
}
From source file:com.github.rholder.esthree.command.GetMultipart.java
License:Apache License
/**
 * Downloads the byte range [{@code start}, {@code end}] (inclusive) of
 * {@code bucket}/{@code key} into {@code output} at offset {@code start},
 * retrying the whole attempt via {@link RetryUtils#AWS_RETRYER}, and returns
 * the {@link MessageDigest} accumulated by {@code copyAndHash} for this part.
 *
 * @param start first byte of the range (inclusive)
 * @param end last byte of the range (inclusive)
 * @return the digest of the bytes written for this chunk
 * @throws ExecutionException if the wrapped call fails non-retryably
 * @throws RetryException if all retry attempts are exhausted
 */
public MessageDigest retryingGetWithRange(final long start, final long end)
        throws ExecutionException, RetryException {
    return (MessageDigest) RetryUtils.AWS_RETRYER.call(new Callable<Object>() {
        public MessageDigest call() throws Exception {
            // S3 ranges are inclusive on both ends
            long totalBytes = end - start + 1;
            Progress progress = new TransferProgressWrapper(new TransferProgress());
            progress.setTotalBytesToTransfer(totalBytes);
            if (progressListener != null) {
                // scale this part's progress into the overall multi-part transfer:
                // completed = fraction of the whole object already before this range
                progressListener.withTransferProgress(progress).withCompleted((100.0 * start) / contentLength)
                        .withMultiplier(
                                (1.0 * totalBytes / (Math.min(contentLength, chunkSize))) / fileParts.size());
            }
            GetObjectRequest req = new GetObjectRequest(bucket, key).withRange(start, end);
            S3Object s3Object = amazonS3Client.getObject(req);
            InputStream input = null;
            try {
                // create the output file, now that we know it actually exists
                if (output == null) {
                    output = new RandomAccessFile(outputFile, "rw");
                }
                // seek to the start of the chunk in the file, just in case we're retrying
                output.seek(start);
                input = s3Object.getObjectContent();
                return copyAndHash(input, totalBytes, progress);
            } finally {
                IOUtils.closeQuietly(input);
            }
        }
    });
}
From source file:com.github.wuic.nut.s3.S3NutDao.java
License:Open Source License
/** * {@inheritDoc}// w w w . j a v a 2 s .c o m */ @Override public Nut accessFor(final String realPath, final NutType type) throws StreamException { // Try to get S3 object S3Object s3Object; try { connect(); s3Object = amazonS3Client.getObject(bucketName, realPath); } catch (AmazonServiceException ase) { throw new StreamException(new IOException( String.format("Can't get S3Object on bucket %s for nut key : %s", bucketName, realPath), ase)); } S3ObjectInputStream s3ObjectInputStream = null; try { // Get S3Object content s3ObjectInputStream = s3Object.getObjectContent(); // Download path into memory final ByteArrayOutputStream baos = new ByteArrayOutputStream(IOUtils.WUIC_BUFFER_LEN); IOUtils.copyStream(s3ObjectInputStream, baos); // Create nut return new ByteArrayNut(baos.toByteArray(), realPath, type, getVersionNumber(realPath)); } finally { // Close S3Object stream IOUtils.close(s3ObjectInputStream); } }
From source file:com.gst.infrastructure.documentmanagement.contentrepository.S3ContentRepository.java
License:Apache License
@Override public FileData fetchFile(final DocumentData documentData) throws DocumentNotFoundException { FileData fileData = null;//w w w . j av a 2 s. co m final String fileName = documentData.fileName(); try { logger.info("Downloading an object"); final S3Object s3object = this.s3Client .getObject(new GetObjectRequest(this.s3BucketName, documentData.fileLocation())); fileData = new FileData(s3object.getObjectContent(), fileName, documentData.contentType()); } catch (final AmazonClientException ace) { logger.error(ace.getMessage()); throw new DocumentNotFoundException(documentData.getParentEntityType(), documentData.getParentEntityId(), documentData.getId()); } return fileData; }
From source file:com.gst.infrastructure.documentmanagement.contentrepository.S3ContentRepository.java
License:Apache License
/**
 * Fetches an image's content from the configured S3 bucket and attaches it to
 * the given {@link ImageData}. On S3 failure the error is logged and the
 * image is returned unmodified (best-effort, matching the existing contract).
 *
 * @param imageData the image descriptor whose content should be populated
 * @return the same {@code imageData}, with content updated when the download succeeded
 */
@Override
public ImageData fetchImage(final ImageData imageData) {
    try {
        final S3Object s3object = this.s3Client
                .getObject(new GetObjectRequest(this.s3BucketName, imageData.location()));
        imageData.updateContent(s3object.getObjectContent());
    } catch (AmazonS3Exception e) {
        // Log the exception itself, not just its message, so the root cause
        // and stack trace are preserved; the method stays best-effort.
        logger.error(e.getMessage(), e);
    }
    return imageData;
}
From source file:com.haskins.cloudtrailviewer.core.EventLoader.java
License:Open Source License
/**
 * Opens the CloudTrail log object {@code bucketName}/{@code key} and returns
 * its content stream. The caller owns the stream and must close it to release
 * the underlying HTTP connection.
 */
private InputStream loadEventFromS3(AmazonS3 s3Client, String bucketName, final String key) throws IOException {
    return s3Client.getObject(new GetObjectRequest(bucketName, key)).getObjectContent();
}
From source file:com.hpe.caf.worker.datastore.s3.S3DataStore.java
License:Apache License
@Override public InputStream retrieve(String reference) throws DataStoreException { LOG.debug("Received retrieve request for {}", reference); numRx.incrementAndGet();// www. j a va2 s .c o m try { S3Object s3Object = amazonS3Client.getObject(bucketName, reference); //Do not close this as we return the stream and it should be closed by the caller. return s3Object.getObjectContent(); } catch (Exception e) { errors.incrementAndGet(); throw new DataStoreException("Failed to retrieve data from reference " + reference, e); } }
From source file:com.ikanow.infinit.e.harvest.extraction.document.file.AwsInfiniteFile.java
License:Open Source License
/**
 * Opens the S3 object backing this file and returns its content stream.
 * The caller is responsible for closing the returned stream.
 */
@Override
public InputStream getInputStream() throws IOException {
    final AmazonS3Client client = (AmazonS3Client) _awsClient;
    return client.getObject(_awsBucketName, _awsObjectName).getObjectContent();
}
From source file:com.imos.sample.S3SampleCheck.java
License:Open Source License
public static void main(String[] args) throws IOException { /*//from w w w .j a v a2 s. co m * The ProfileCredentialsProvider will return your [default] * credential profile by reading from the credentials file located at * (/home/alok/.aws/credentials). */ AWSCredentials credentials = null; try { credentials = new ProfileCredentialsProvider("default").getCredentials(); } catch (Exception e) { throw new AmazonClientException("Cannot load the credentials from the credential profiles file. " + "Please make sure that your credentials file is at the correct " + "location (/home/alok/.aws/credentials), and is in valid format.", e); } AmazonS3 s3 = new AmazonS3Client(credentials); // Region usWest2 = Region.getRegion(Regions.US_WEST_2); Region usWest2 = Region.getRegion(Regions.AP_SOUTHEAST_1); s3.setRegion(usWest2); String bucketName = "alok-test"; String key = "sample.json"; System.out.println("==========================================="); System.out.println("Getting Started with Amazon S3"); System.out.println("===========================================\n"); try { /* * Create a new S3 bucket - Amazon S3 bucket names are globally unique, * so once a bucket name has been taken by any user, you can't create * another bucket with that same name. * * You can optionally specify a location for your bucket if you want to * keep your data closer to your applications or users. */ // System.out.println("Creating bucket " + bucketName + "\n"); // s3.createBucket(bucketName); /* * List the buckets in your account */ // System.out.println("Listing buckets"); // for (Bucket bucket : s3.listBuckets()) { // System.out.println(" - " + bucket.getName()); // } System.out.println(); /* * Upload an object to your bucket - You can easily upload a file to * S3, or upload directly an InputStream if you know the length of * the data in the stream. 
You can also specify your own metadata * when uploading to S3, which allows you set a variety of options * like content-type and content-encoding, plus additional metadata * specific to your applications. */ System.out.println("Uploading a new object to S3 from a file\n"); //s3.putObject(new PutObjectRequest(bucketName, key, createSampleFile())); s3.putObject(new PutObjectRequest(bucketName, key, createSampleFile())); /* * Download an object - When you download an object, you get all of * the object's metadata and a stream from which to read the contents. * It's important to read the contents of the stream as quickly as * possibly since the data is streamed directly from Amazon S3 and your * network connection will remain open until you read all the data or * close the input stream. * * GetObjectRequest also supports several other options, including * conditional downloading of objects based on modification times, * ETags, and selectively downloading a range of an object. */ System.out.println("Downloading an object"); // S3Object object = s3.getObject(new GetObjectRequest(bucketName, key)); S3Object object = s3.getObject(new GetObjectRequest("alok-test", key)); System.out.println("Content-Type: " + object.getObjectMetadata().getContentType()); displayTextInputStream(object.getObjectContent()); /* * List objects in your bucket by prefix - There are many options for * listing the objects in your bucket. Keep in mind that buckets with * many objects might truncate their results when listing their objects, * so be sure to check if the returned object listing is truncated, and * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve * additional results. 
*/ System.out.println("Listing objects"); ObjectListing objectListing = s3.listObjects(new ListObjectsRequest() // .withBucketName(bucketName) .withBucketName("alok-test")); // .withPrefix("My")); objectListing.getObjectSummaries().forEach((objectSummary) -> { System.out.println( " - " + objectSummary.getKey() + " " + "(size = " + objectSummary.getSize() + ")"); }); System.out.println(); /* * Delete an object - Unless versioning has been turned on for your bucket, * there is no way to undelete an object, so use caution when deleting objects. */ // System.out.println("Deleting an object\n"); // s3.deleteObject(bucketName, key); /* * Delete a bucket - A bucket must be completely empty before it can be * deleted, so remember to delete any objects from your buckets before * you try to delete them. */ // System.out.println("Deleting bucket " + bucketName + "\n"); // s3.deleteBucket(bucketName); } catch (AmazonServiceException ase) { System.out.println("Caught an AmazonServiceException, which means your request made it " + "to Amazon S3, but was rejected with an error response for some reason."); System.out.println("Error Message: " + ase.getMessage()); System.out.println("HTTP Status Code: " + ase.getStatusCode()); System.out.println("AWS Error Code: " + ase.getErrorCode()); System.out.println("Error Type: " + ase.getErrorType()); System.out.println("Request ID: " + ase.getRequestId()); } catch (AmazonClientException ace) { System.out.println("Caught an AmazonClientException, which means the client encountered " + "a serious internal problem while trying to communicate with S3, " + "such as not being able to access the network."); System.out.println("Error Message: " + ace.getMessage()); } }
From source file:com.imos.sample.SampleS3.java
public static void main(String[] args) throws IOException { AmazonS3 s3Client = new AmazonS3Client(new ProfileCredentialsProvider()); S3Object object = s3Client.getObject(new GetObjectRequest("inv.adminconsole.test", "")); InputStream objectData = object.getObjectContent(); // Process the objectData stream. objectData.close();//from www . j ava2 s. c o m }