List of usage examples for com.amazonaws.services.s3.model.S3Object.getObjectContent()
public S3ObjectInputStream getObjectContent()
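Before the project examples below, here is a minimal, self-contained sketch of the typical call pattern (the class name, bucket, and key are placeholders, not taken from any of the projects listed): getObject() returns an S3Object, and getObjectContent() hands back an S3ObjectInputStream that streams the HTTP response body, so it should be read promptly and then closed.

import java.io.IOException;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectInputStream;
import com.amazonaws.util.IOUtils;

public class GetObjectContentSketch {
    public static void main(String[] args) throws IOException {
        // Placeholder bucket and key; substitute real values.
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        S3Object object = s3.getObject("example-bucket", "example/key.txt");

        // The stream is backed by the HTTP response; read it fully and close it,
        // otherwise the connection to S3 stays open.
        try (S3ObjectInputStream content = object.getObjectContent()) {
            byte[] bytes = IOUtils.toByteArray(content);
            System.out.println("Read " + bytes.length + " bytes, Content-Type: "
                    + object.getObjectMetadata().getContentType());
        }
    }
}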
From source file:com.nike.cerberus.operation.core.EnableConfigReplicationOperation.java
License:Apache License
private void touchCurrentFiles() {
    final String bucketName = environmentMetadata.getBucketName();
    final ObjectListing objectListing = s3Client.listObjects(bucketName);

    logger.info("Touching config files that already exist so they are replicated.");

    objectListing.getObjectSummaries().forEach(os -> {
        if (!StringUtils.startsWith(os.getKey(), "consul")) {
            logger.debug("Touching {}.", os.getKey());
            final S3Object object = s3Client.getObject(bucketName, os.getKey());
            s3Client.putObject(bucketName, object.getKey(), object.getObjectContent(),
                    object.getObjectMetadata());
        }
    });
}
From source file:com.nike.cerberus.service.S3StoreService.java
License:Apache License
public Optional<String> get(String path) {
    GetObjectRequest request = new GetObjectRequest(s3Bucket, getFullPath(path));

    try {
        S3Object s3Object = s3Client.getObject(request);
        InputStream object = s3Object.getObjectContent();
        return Optional.of(IOUtils.toString(object, ConfigConstants.DEFAULT_ENCODING));
    } catch (AmazonServiceException ase) {
        if (StringUtils.equalsIgnoreCase(ase.getErrorCode(), "NoSuchKey")) {
            logger.debug(String.format("The S3 object doesn't exist. Bucket: %s, Key: %s",
                    s3Bucket, request.getKey()));
            return Optional.empty();
        } else {
            logger.error("Unexpected error communicating with AWS.", ase);
            throw ase;
        }
    } catch (IOException e) {
        String errorMessage = String.format(
                "Unable to read contents of S3 object. Bucket: %s, Key: %s, Expected Encoding: %s",
                s3Bucket, request.getKey(), ConfigConstants.DEFAULT_ENCODING);
        logger.error(errorMessage);
        throw new UnexpectedDataEncodingException(errorMessage, e);
    }
}
From source file:com.projectlaver.batch.FacebookListingPostingItemProcessor.java
License:Open Source License
void copyS3ObjectToTempFile(File tempFile, String filename) throws IOException, FileNotFoundException {
    AWSCredentials myCredentials = new BasicAWSCredentials(this.s3accessKey, this.s3secretKey);
    AmazonS3 s3 = new AmazonS3Client(myCredentials);
    S3Object object = s3.getObject(this.s3publicBucketName, filename);
    IOUtils.copy(object.getObjectContent(), new FileOutputStream(tempFile));
}
From source file:com.projectlaver.service.ListingService.java
License:Open Source License
void streamAwsContentToResponse(String contentFilename, String bucketName, OutputStream outputStream)
        throws IOException {
    AWSCredentials myCredentials = new BasicAWSCredentials(this.s3accessKey, this.s3secretKey);
    AmazonS3 s3 = new AmazonS3Client(myCredentials);
    S3Object object = s3.getObject(bucketName, contentFilename);
    FileCopyUtils.copy(object.getObjectContent(), outputStream);
}
From source file:com.qubole.presto.kinesis.s3config.S3TableConfigClient.java
License:Apache License
/**
 * Connect to S3 directory to look for new or updated table definitions and then
 * update the map.
 */
protected void updateTablesFromS3() {
    long now = System.currentTimeMillis();

    List<S3ObjectSummary> objectList = this.getObjectSummaries();
    AmazonS3Client s3client = this.clientManager.getS3Client();
    AmazonS3URI directoryURI = new AmazonS3URI(this.bucketUrl);

    for (S3ObjectSummary objInfo : objectList) {
        if (!this.internalMap.containsKey(objInfo.getKey())
                || objInfo.getLastModified().getTime() >= this.lastCheck) {
            // New or updated file, so we must read from AWS
            try {
                if (objInfo.getKey().endsWith("/")) {
                    continue;
                }

                log.info("Getting : %s - %s", objInfo.getBucketName(), objInfo.getKey());
                S3Object object = s3client
                        .getObject(new GetObjectRequest(objInfo.getBucketName(), objInfo.getKey()));

                StringBuilder resultStr = new StringBuilder("");
                try (BufferedReader reader = new BufferedReader(
                        new InputStreamReader(object.getObjectContent()))) {
                    boolean hasMore = true;
                    while (hasMore) {
                        String line = reader.readLine();
                        if (line != null) {
                            resultStr.append(line);
                        } else {
                            hasMore = false;
                        }
                    }

                    KinesisStreamDescription table = streamDescriptionCodec.fromJson(resultStr.toString());

                    internalMap.put(objInfo.getKey(), table);
                    log.info("Put table description into the map from %s", objInfo.getKey());
                } catch (IOException iox) {
                    log.error("Problem reading input stream from object.", iox);
                }
            } catch (AmazonServiceException ase) {
                StringBuilder sb = new StringBuilder();
                sb.append("Caught an AmazonServiceException, which means your request made it ");
                sb.append("to Amazon S3, but was rejected with an error response for some reason.\n");
                sb.append("Error Message: " + ase.getMessage());
                sb.append("HTTP Status Code: " + ase.getStatusCode());
                sb.append("AWS Error Code: " + ase.getErrorCode());
                sb.append("Error Type: " + ase.getErrorType());
                sb.append("Request ID: " + ase.getRequestId());
                log.error(sb.toString(), ase);
            } catch (AmazonClientException ace) {
                StringBuilder sb = new StringBuilder();
                sb.append("Caught an AmazonClientException, which means the client encountered "
                        + "an internal error while trying to communicate with S3, "
                        + "such as not being able to access the network.");
                sb.append("Error Message: " + ace.getMessage());
                log.error(sb.toString(), ace);
            }
        }
    } // end loop through object descriptions

    log.info("Completed updating table definitions from S3.");
    this.lastCheck = now;

    return;
}
From source file:com.rathravane.clerk.impl.s3.S3IamDb.java
License:Apache License
/**
 * Load an object to a stream.
 * @param key
 * @param os
 * @return true if found, false if not found
 * @throws IamSvcException
 * @throws IamBadRequestException
 */
private boolean loadTo(String key, OutputStream os) throws IamSvcException {
    S3Object object = null;
    try {
        object = fDb.getObject(new GetObjectRequest(fBucketId, key));
        final InputStream is = object.getObjectContent();
        // s3 objects must be closed or will leak an HTTP connection
        rrStreamTools.copyStream(is, os);
        return true;
    } catch (AmazonServiceException x) {
        if (404 == x.getStatusCode())
            return false;
        throw new IamSvcException(x);
    } catch (AmazonClientException x) {
        throw new IamSvcException(x);
    } catch (IOException x) {
        throw new IamSvcException(x);
    } finally {
        if (object != null) {
            try {
                object.close();
            } catch (IOException e) {
                throw new IamSvcException(e);
            }
        }
    }
}
From source file:com.sangupta.urn.service.impl.AmazonS3UrnStorageServiceImpl.java
License:Apache License
@Override
protected UrnObject get(String objectKey) {
    S3Object object = this.client.getObject(this.bucketName, objectKey);
    if (object == null) {
        return null;
    }

    try {
        InputStream stream = object.getObjectContent();
        byte[] bytes = IOUtils.toByteArray(stream);

        UrnObject urnObject = new UrnObject(objectKey, bytes);

        // TODO: read and populate metadata
        ObjectMetadata metadata = object.getObjectMetadata();
        if (metadata != null) {
            if (metadata.getHttpExpiresDate() != null) {
                urnObject.expiry = metadata.getHttpExpiresDate().getTime();
            }

            urnObject.mime = metadata.getContentType();
            urnObject.stored = metadata.getLastModified().getTime();

            // TODO: parse the value to extract the filename if available
            urnObject.name = metadata.getContentDisposition();
        }

        // return the object
        return urnObject;
    } catch (IOException e) {
        // happens when we cannot read data from S3
        LOGGER.debug("Exception reading data from S3 for object key: " + objectKey, e);
        return null;
    } finally {
        if (object != null) {
            try {
                object.close();
            } catch (IOException e) {
                LOGGER.warn("Unable to close S3 object during/after reading the object");
            }
        }
    }
}
From source file:com.shareplaylearn.models.UserItemManager.java
License:Open Source License
public Response getItem(String contentType, ItemSchema.PresentationType presentationType, String name,
        String encoding) {
    if (encoding != null && encoding.length() > 0 && !AvailableEncodings.isAvailable(encoding)) {
        return Response.status(Response.Status.UNSUPPORTED_MEDIA_TYPE)
                .entity("Inner Encoding Type: " + encoding + " not available").build();
    }

    AmazonS3Client s3Client = new AmazonS3Client(
            new BasicAWSCredentials(SecretsService.amazonClientId, SecretsService.amazonClientSecret));

    try {
        S3Object object = s3Client.getObject(ItemSchema.S3_BUCKET,
                getItemLocation(name, contentType, presentationType));

        try (S3ObjectInputStream inputStream = object.getObjectContent()) {
            long contentLength = object.getObjectMetadata().getContentLength();
            if (contentLength > Limits.MAX_RETRIEVE_SIZE) {
                throw new IOException("Object is too large: " + contentLength + " bytes.");
            }

            int bufferSize = Math.min((int) contentLength, 10 * 8192);
            byte[] buffer = new byte[bufferSize];
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            int bytesRead = 0;
            int totalBytesRead = 0;
            while ((bytesRead = inputStream.read(buffer)) > 0) {
                outputStream.write(buffer, 0, bytesRead);
                totalBytesRead += bytesRead;
            }
            log.debug("GET in file resource read: " + totalBytesRead + " bytes.");

            if (encoding == null || encoding.length() == 0 || encoding.equals(AvailableEncodings.IDENTITY)) {
                return Response.status(Response.Status.OK).entity(outputStream.toByteArray()).build();
            } else if (encoding.equals(AvailableEncodings.BASE64)) {
                return Response.status(Response.Status.OK)
                        .entity(Base64.encodeAsString(outputStream.toByteArray())).build();
            } else {
                return Response.status(Response.Status.UNSUPPORTED_MEDIA_TYPE)
                        .entity("Inner Encoding Type: " + encoding + " not available").build();
            }
        }
    } catch (Exception e) {
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        pw.println("\nFailed to retrieve: " + name);
        e.printStackTrace(pw);
        log.warn("Failed to retrieve: " + name);
        log.info(Exceptions.asString(e));
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(sw.toString()).build();
    }
}
From source file:com.shelfmap.simplequery.domain.impl.DefaultBlobReference.java
License:Apache License
@Override
public InputStream getInputStream() {
    S3Object resource = getS3ObjectRemote();
    this.metadata = resource.getObjectMetadata();
    return resource.getObjectContent();
}
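Because this accessor hands the raw object-content stream back to its caller, the caller becomes responsible for draining and closing it; as the S3IamDb and S3Sample snippets on this page note, an unclosed stream leaks the underlying HTTP connection. A hypothetical caller sketch (illustrative only, assuming just that getInputStream() behaves as shown above):

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public class BlobCopySketch {
    // blobStream is assumed to come from DefaultBlobReference.getInputStream() above.
    static void copyToFile(InputStream blobStream, String targetPath) throws IOException {
        try (InputStream in = blobStream;
                OutputStream out = new FileOutputStream(targetPath)) {
            byte[] buffer = new byte[8192];
            int read;
            while ((read = in.read(buffer)) != -1) {
                out.write(buffer, 0, read);
            }
        } // try-with-resources closes the S3 stream even if the copy fails
    }
}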
From source file:com.sjsu.faceit.example.S3Sample.java
License:Open Source License
public static void main(String[] args) throws IOException {
    /*
     * Important: Be sure to fill in your AWS access credentials in the
     * AwsCredentials.properties file before you try to run this sample.
     * http://aws.amazon.com/security-credentials
     */
    System.out.println(new File(".").getAbsolutePath());
    AmazonS3 s3 = new AmazonS3Client(
            new PropertiesCredentials(S3Sample.class.getResourceAsStream("AwsCredentials.properties")));

    String bucketName = "my-first-s3-bucket-" + UUID.randomUUID();
    String key = "MyObjectKey";

    System.out.println("===========================================");
    System.out.println("Getting Started with Amazon S3");
    System.out.println("===========================================\n");

    try {
        /*
         * Create a new S3 bucket - Amazon S3 bucket names are globally unique,
         * so once a bucket name has been taken by any user, you can't create
         * another bucket with that same name.
         *
         * You can optionally specify a location for your bucket if you want to
         * keep your data closer to your applications or users.
         */
        System.out.println("Creating bucket " + bucketName + "\n");
        s3.createBucket(bucketName);

        /*
         * List the buckets in your account
         */
        System.out.println("Listing buckets");
        for (Bucket bucket : s3.listBuckets()) {
            System.out.println(" - " + bucket.getName());
        }
        System.out.println();

        /*
         * Upload an object to your bucket - You can easily upload a file to
         * S3, or upload an InputStream directly if you know the length of
         * the data in the stream. You can also specify your own metadata
         * when uploading to S3, which allows you to set a variety of options
         * like content-type and content-encoding, plus additional metadata
         * specific to your applications.
         */
        System.out.println("Uploading a new object to S3 from a file\n");
        s3.putObject(new PutObjectRequest(bucketName, "abc/" + key, new File("/Users/prayag/Desktop/2.jpg")));

        /*
         * Download an object - When you download an object, you get all of
         * the object's metadata and a stream from which to read the contents.
         * It's important to read the contents of the stream as quickly as
         * possible since the data is streamed directly from Amazon S3 and your
         * network connection will remain open until you read all the data or
         * close the input stream.
         *
         * GetObjectRequest also supports several other options, including
         * conditional downloading of objects based on modification times,
         * ETags, and selectively downloading a range of an object.
         */
        System.out.println("Downloading an object");
        S3Object object = s3.getObject(new GetObjectRequest(bucketName, "abc/" + key));
        System.out.println("Content-Type: " + object.getObjectMetadata().getContentType());
        displayTextInputStream(object.getObjectContent());

        /*
         * List objects in your bucket by prefix - There are many options for
         * listing the objects in your bucket. Keep in mind that buckets with
         * many objects might truncate their results when listing their objects,
         * so be sure to check if the returned object listing is truncated, and
         * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve
         * additional results.
         */
        System.out.println("Listing objects");
        ObjectListing objectListing = s3
                .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix("My"));
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            System.out.println(
                    " - " + objectSummary.getKey() + " " + "(size = " + objectSummary.getSize() + ")");
        }
        System.out.println();

        /*
         * Delete an object - Unless versioning has been turned on for your bucket,
         * there is no way to undelete an object, so use caution when deleting objects.
         */
        // System.out.println("Deleting an object\n");
        // s3.deleteObject(bucketName, key);

        /*
         * Delete a bucket - A bucket must be completely empty before it can be
         * deleted, so remember to delete any objects from your buckets before
         * you try to delete them.
         */
        // System.out.println("Deleting bucket " + bucketName + "\n");
        // s3.deleteBucket(bucketName);
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message: " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code: " + ase.getErrorCode());
        System.out.println("Error Type: " + ase.getErrorType());
        System.out.println("Request ID: " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}