List of usage examples for com.amazonaws.services.s3.model S3ObjectSummary getKey
public String getKey()
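getKey() returns the key under which the summarized object is stored in its bucket (the key only, with no bucket name or "s3://" scheme). Before the project examples below, here is a minimal sketch of the typical usage: list a bucket and read each summary's key. It assumes the AWS SDK for Java v1 on the classpath and default credentials; the bucket name and prefix are placeholders, not values from any of the sources below.

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ListObjectsV2Request;
import com.amazonaws.services.s3.model.ListObjectsV2Result;
import com.amazonaws.services.s3.model.S3ObjectSummary;

public class GetKeyExample {
    public static void main(String[] args) {
        // Placeholder bucket and prefix; replace with your own values.
        final String bucket = "example-bucket";
        final String prefix = "some/prefix/";

        final AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        final ListObjectsV2Request request = new ListObjectsV2Request()
                .withBucketName(bucket)
                .withPrefix(prefix);
        final ListObjectsV2Result result = s3.listObjectsV2(request);

        // Each S3ObjectSummary describes one listed object; getKey() gives its key within the bucket.
        for (S3ObjectSummary summary : result.getObjectSummaries()) {
            System.out.println(summary.getKey() + " (" + summary.getSize() + " bytes)");
        }
    }
}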
From source file:io.druid.firehose.s3.StaticS3FirehoseFactory.java
License:Apache License
@Override
protected InputStream openObjectStream(S3ObjectSummary object, long start) throws IOException {
    final GetObjectRequest request = new GetObjectRequest(object.getBucketName(), object.getKey());
    request.setRange(start);
    try {
        final S3Object s3Object = s3Client.getObject(request);
        if (s3Object == null) {
            throw new ISE("Failed to get an s3 object for bucket[%s], key[%s], and start[%d]",
                    object.getBucketName(), object.getKey(), start);
        }
        return s3Object.getObjectContent();
    } catch (AmazonS3Exception e) {
        throw new IOException(e);
    }
}
From source file:io.druid.firehose.s3.StaticS3FirehoseFactory.java
License:Apache License
@Override
protected InputStream wrapObjectStream(S3ObjectSummary object, InputStream stream) throws IOException {
    return object.getKey().endsWith(".gz") ? CompressionUtils.gzipInputStream(stream) : stream;
}
From source file:io.druid.storage.s3.S3DataSegmentFinder.java
License:Apache License
@Override
public Set<DataSegment> findSegments(String workingDirPath, boolean updateDescriptor) throws SegmentLoadingException {
    final Set<DataSegment> segments = Sets.newHashSet();
    try {
        final Iterator<S3ObjectSummary> objectSummaryIterator = S3Utils.objectSummaryIterator(s3Client,
                config.getBucket(), workingDirPath.length() == 0 ? config.getBaseKey() : workingDirPath,
                config.getMaxListingLength());
        while (objectSummaryIterator.hasNext()) {
            final S3ObjectSummary objectSummary = objectSummaryIterator.next();
            if (S3Utils.toFilename(objectSummary.getKey()).equals("descriptor.json")) {
                final String descriptorJson = objectSummary.getKey();
                String indexZip = S3Utils.indexZipForSegmentPath(descriptorJson);
                if (S3Utils.isObjectInBucketIgnoringPermission(s3Client, config.getBucket(), indexZip)) {
                    try (S3Object indexObject = s3Client.getObject(config.getBucket(), descriptorJson);
                            S3ObjectInputStream is = indexObject.getObjectContent()) {
                        final ObjectMetadata objectMetadata = indexObject.getObjectMetadata();
                        final DataSegment dataSegment = jsonMapper.readValue(is, DataSegment.class);
                        log.info("Found segment [%s] located at [%s]", dataSegment.getIdentifier(), indexZip);
                        final Map<String, Object> loadSpec = dataSegment.getLoadSpec();
                        if (!S3StorageDruidModule.SCHEME.equals(loadSpec.get("type"))
                                || !indexZip.equals(loadSpec.get("key"))
                                || !config.getBucket().equals(loadSpec.get("bucket"))) {
                            loadSpec.put("type", S3StorageDruidModule.SCHEME);
                            loadSpec.put("key", indexZip);
                            loadSpec.put("bucket", config.getBucket());
                            if (updateDescriptor) {
                                log.info("Updating loadSpec in descriptor.json at [%s] with new path [%s]",
                                        descriptorJson, indexObject);
                                final ByteArrayInputStream bais = new ByteArrayInputStream(
                                        StringUtils.toUtf8(jsonMapper.writeValueAsString(dataSegment)));
                                s3Client.putObject(config.getBucket(), descriptorJson, bais, objectMetadata);
                            }
                        }
                        segments.add(dataSegment);
                    }
                } else {
                    throw new SegmentLoadingException(
                            "index.zip didn't exist at [%s] while descriptor.json exists!?", indexZip);
                }
            }
        }
    } catch (AmazonServiceException e) {
        throw new SegmentLoadingException(e, "Problem interacting with S3");
    } catch (IOException e) {
        throw new SegmentLoadingException(e, "IO exception");
    } catch (Exception e) {
        Throwables.propagateIfInstanceOf(e, SegmentLoadingException.class);
        Throwables.propagate(e);
    }
    return segments;
}
From source file:io.druid.storage.s3.S3TimestampVersionedDataFinder.java
License:Apache License
/**
 * Gets the key with the most recently modified timestamp.
 * `pattern` is evaluated against the entire key AFTER the path given in `uri`.
 * The substring that `pattern` is matched against has a single leading `/` removed.
 * For example, `s3://some_bucket/some_prefix/some_key` with a URI of `s3://some_bucket/some_prefix` will match against `some_key`,
 * `s3://some_bucket/some_prefixsome_key` with a URI of `s3://some_bucket/some_prefix` will match against `some_key`, and
 * `s3://some_bucket/some_prefix//some_key` with a URI of `s3://some_bucket/some_prefix` will match against `/some_key`.
 *
 * @param uri     The URI in the form of `s3://some_bucket/some_key`
 * @param pattern The pattern matcher to determine if a *key* is of interest, or `null` to match everything.
 *
 * @return A URI to the most recently modified object which matched the pattern.
 */
@Override
public URI getLatestVersion(final URI uri, final @Nullable Pattern pattern) {
    try {
        return RetryUtils.retry(() -> {
            final S3Coords coords = new S3Coords(checkURI(uri));
            long mostRecent = Long.MIN_VALUE;
            URI latest = null;
            final Iterator<S3ObjectSummary> objectSummaryIterator = S3Utils.objectSummaryIterator(s3Client,
                    coords.bucket, coords.path, MAX_LISTING_KEYS);
            while (objectSummaryIterator.hasNext()) {
                final S3ObjectSummary objectSummary = objectSummaryIterator.next();
                String keyString = objectSummary.getKey().substring(coords.path.length());
                if (keyString.startsWith("/")) {
                    keyString = keyString.substring(1);
                }
                if (pattern != null && !pattern.matcher(keyString).matches()) {
                    continue;
                }
                final long latestModified = objectSummary.getLastModified().getTime();
                if (latestModified >= mostRecent) {
                    mostRecent = latestModified;
                    latest = new URI(StringUtils.format("s3://%s/%s", objectSummary.getBucketName(),
                            objectSummary.getKey()));
                }
            }
            return latest;
        }, shouldRetryPredicate(), DEFAULT_RETRY_COUNT);
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }
}
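To make the key-trimming rule described in the javadoc above concrete, here is a small standalone sketch of the same trimming-and-matching step in isolation. The prefix, keys, and pattern are made-up illustration values, not taken from the Druid source.

import java.util.regex.Pattern;

public class KeyTrimDemo {
    public static void main(String[] args) {
        final String prefix = "some_prefix";                 // stands in for coords.path of s3://some_bucket/some_prefix
        final Pattern pattern = Pattern.compile(".*\\.gz");  // hypothetical pattern: only keys ending in .gz

        for (String key : new String[] { "some_prefix/a.gz", "some_prefixb.gz", "some_prefix//c.gz" }) {
            // Same trimming as getLatestVersion(): drop the prefix, then at most one leading "/".
            String trimmed = key.substring(prefix.length());
            if (trimmed.startsWith("/")) {
                trimmed = trimmed.substring(1);
            }
            System.out.println(key + " -> " + trimmed + " matches=" + pattern.matcher(trimmed).matches());
        }
    }
}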
From source file:io.druid.storage.s3.S3Utils.java
License:Apache License
/**
 * Gets a single {@link S3ObjectSummary} from s3. Since this method might return a wrong object if there are
 * multiple objects that match the given key, this method should be used only when it's guaranteed that the
 * given key is unique in the given bucket.
 *
 * @param s3Client s3 client
 * @param bucket   s3 bucket
 * @param key      unique key for the object to be retrieved
 */
public static S3ObjectSummary getSingleObjectSummary(AmazonS3 s3Client, String bucket, String key) {
    final ListObjectsV2Request request = new ListObjectsV2Request().withBucketName(bucket).withPrefix(key)
            .withMaxKeys(1);
    final ListObjectsV2Result result = s3Client.listObjectsV2(request);

    if (result.getKeyCount() == 0) {
        throw new ISE("Cannot find object for bucket[%s] and key[%s]", bucket, key);
    }
    final S3ObjectSummary objectSummary = result.getObjectSummaries().get(0);
    if (!objectSummary.getBucketName().equals(bucket) || !objectSummary.getKey().equals(key)) {
        throw new ISE("Wrong object[%s] for bucket[%s] and key[%s]", objectSummary, bucket, key);
    }

    return objectSummary;
}
From source file:io.jeffrey.web.assemble.S3PutObjectTarget.java
/**
 * @param bucket the bucket where we intend to upload the files
 * @param s3     the S3 client
 */
public S3PutObjectTarget(final String bucket, final AmazonS3 s3) {
    this.bucket = bucket;
    this.s3 = s3;
    this.etags = new HashMap<>();
    String marker = "";
    boolean again = true;
    while (again) {
        again = false;
        ListObjectsRequest request = new ListObjectsRequest(bucket, "", marker, null, 1000);
        for (S3ObjectSummary obj : s3.listObjects(request).getObjectSummaries()) {
            marker = obj.getKey();
            if (marker.startsWith("charts/")) {
                s3.deleteObject(bucket, marker);
            }
            System.out.println(marker + "->" + obj.getETag());
            etags.put(obj.getKey(), obj.getETag());
            again = true;
        }
    }
}
From source file:io.konig.camel.aws.s3.DeleteObjectConsumer.java
License:Apache License
protected Queue<Exchange> createExchanges(List<S3ObjectSummary> s3ObjectSummaries) {
    if (LOG.isTraceEnabled()) {
        LOG.trace("Received {} messages in this poll", s3ObjectSummaries.size());
    }

    Collection<S3Object> s3Objects = new ArrayList<>();
    Queue<Exchange> answer = new LinkedList<Exchange>();
    try {
        for (S3ObjectSummary s3ObjectSummary : s3ObjectSummaries) {
            S3Object s3Object = getAmazonS3Client().getObject(s3ObjectSummary.getBucketName(),
                    s3ObjectSummary.getKey());
            s3Objects.add(s3Object);
            Exchange exchange = getEndpoint().createExchange(s3Object);
            answer.add(exchange);
        }
    } catch (Throwable e) {
        LOG.warn("Error getting S3Object due: " + e.getMessage(), e);
        // ensure all previously gathered s3 objects are closed
        // if there was an exception creating the exchanges in this batch
        s3Objects.forEach(IOHelper::close);
        throw e;
    }

    return answer;
}
From source file:io.milton.s3.AmazonS3ManagerImpl.java
License:Open Source License
@Override
public boolean deleteEntities(String bucketName) {
    LOG.info("Deletes multiple objects in a bucket " + bucketName + " from Amazon S3");
    List<S3ObjectSummary> s3ObjectSummaries = findEntityByBucket(bucketName);
    if (s3ObjectSummaries == null || s3ObjectSummaries.isEmpty()) {
        return false;
    }

    // Provide a list of object keys and versions.
    List<KeyVersion> keyVersions = new ArrayList<KeyVersion>();
    for (S3ObjectSummary s3ObjectSummary : s3ObjectSummaries) {
        keyVersions.add(new KeyVersion(s3ObjectSummary.getKey()));
    }

    try {
        DeleteObjectsRequest deleteObjectsRequest = new DeleteObjectsRequest(bucketName).withKeys(keyVersions);
        DeleteObjectsResult deleteObjectsResult = amazonS3Client.deleteObjects(deleteObjectsRequest);
        if (deleteObjectsResult != null) {
            LOG.info("Successfully deleted all the " + deleteObjectsResult.getDeletedObjects().size()
                    + " items.\n");
            return true;
        }
    } catch (AmazonServiceException ase) {
        LOG.warn(ase.getMessage(), ase);
    } catch (AmazonClientException ace) {
        LOG.warn(ace.getMessage(), ace);
    }
    return false;
}
From source file:io.milton.s3.service.AmazonStorageServiceImpl.java
License:Open Source License
@Override
public List<Entity> findEntityByParent(String bucketName, Folder parent) {
    if (parent == null) {
        return Collections.emptyList();
    }

    // Get all files of the current folder that already exist in Amazon S3
    List<S3ObjectSummary> objectSummaries = amazonS3Manager.findEntityByPrefixKey(bucketName,
            parent.getId().toString());
    List<Entity> children = new ArrayList<Entity>();
    for (S3ObjectSummary objectSummary : objectSummaries) {
        String uniqueId = objectSummary.getKey();
        // Search by only the unique UUID of the entity
        uniqueId = uniqueId.substring(uniqueId.indexOf("/") + 1);
        File file = (File) dynamoDBManager.findEntityByUniqueId(bucketName, uniqueId, parent);
        if (file != null) {
            file.setSize(objectSummary.getSize());
            children.add(file);
        }
    }

    // Get all folders of the current folder that already exist in Amazon DynamoDB
    List<Entity> folders = dynamoDBManager.findEntityByParentAndType(bucketName, parent, true);
    if (folders != null && !folders.isEmpty()) {
        for (Entity folder : folders) {
            if (!children.contains(folder)) {
                children.add(folder);
            }
        }
    }
    return children;
}
From source file:it.openutils.mgnlaws.magnolia.datastore.S3DataRecord.java
License:Open Source License
public S3DataRecord(AmazonS3 client, S3ObjectSummary summary) {
    this(new DataIdentifier(StringUtils.substringAfterLast(summary.getKey(), "/")),
            new S3LazyObject(client, summary.getBucketName(), summary.getKey()));
}