Example usage for com.amazonaws.services.s3.model S3ObjectSummary getKey

List of usage examples for com.amazonaws.services.s3.model S3ObjectSummary getKey

Introduction

On this page you can find example usages for com.amazonaws.services.s3.model S3ObjectSummary getKey.

Prototype

public String getKey() 

Document

Gets the key under which this object is stored in Amazon S3.
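
Before the project examples below, here is a minimal, self-contained sketch of the typical pattern: list a bucket and print the key of each object summary. The bucket name my-bucket, the use of AmazonS3ClientBuilder, and the assumption that credentials and a region are configured in the environment are illustrative choices, not taken from the examples on this page.

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.S3ObjectSummary;

public class ListKeysSketch {

    public static void main(String[] args) {
        // Build a client from the default credential and region provider chain
        // (assumption: both are configured in the environment).
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();

        // List the first page of objects and read each summary's key.
        ObjectListing listing = s3.listObjects("my-bucket");
        for (S3ObjectSummary summary : listing.getObjectSummaries()) {
            System.out.println(summary.getKey());
        }
    }
}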

Usage

From source file:com.ge.predix.solsvc.blobstore.bootstrap.BlobstoreClientImpl.java

License:Apache License

/**
* Gets the list of available Blobs for the bound bucket from the
* BlobStore.
*
* @return List of DataFile Blobs
*/
@Override
public List<DataFile> getBlob() {
    S3Object obj = null;
    try {
        List<DataFile> objs = new ArrayList<DataFile>();
        // Get the List from BlobStore
        ObjectListing objectList = this.s3Client.listObjects(this.blobstoreConfig.getBucketName());

        for (S3ObjectSummary objectSummary : objectList.getObjectSummaries()) {
            obj = this.s3Client.getObject(
                    new GetObjectRequest(this.blobstoreConfig.getBucketName(), objectSummary.getKey()));
            DataFile data = new DataFile();
            data.setFile(IOUtils.toByteArray(obj.getObjectContent()));
            objs.add(data);
            // Close each object's content stream once it has been read;
            // otherwise only the last object would be closed in the finally block.
            obj.close();
            obj = null;
        }
        return objs;
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        if (obj != null) {
            try {
                obj.close();
            } catch (IOException e) {
                throw new RuntimeException(
                        "unable to close object object=" + obj + " throwing original exception", //$NON-NLS-1$ //$NON-NLS-2$
                        e);
            }
        }
    }
}

From source file:com.github.abhinavmishra14.aws.s3.service.impl.AwsS3IamServiceImpl.java

License:Open Source License

@Override
public void deleteDirectory(final String bucketName, final String dirName)
        throws AmazonClientException, AmazonServiceException {
    LOGGER.info("deleteDirectory invoked, bucketName: {}, dirName: {}", bucketName, dirName);
    final List<S3ObjectSummary> listOfFiles = s3client.listObjects(bucketName, dirName).getObjectSummaries();
    for (final S3ObjectSummary eachFile : listOfFiles) {
        s3client.deleteObject(bucketName, eachFile.getKey());
    }
    s3client.deleteObject(bucketName, dirName);
}
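
The listObjects call above returns only the first page of results (at most 1,000 summaries by default), so a very large "directory" would be deleted only partially. Below is a hedged sketch of a paginated variant, assuming the same s3client field; the method name deleteDirectoryPaginated is hypothetical and not part of AwsS3IamServiceImpl.

public void deleteDirectoryPaginated(final String bucketName, final String dirName)
        throws AmazonClientException, AmazonServiceException {
    // Sketch only: walk every page of the listing before deleting the directory marker.
    ObjectListing listing = s3client.listObjects(bucketName, dirName);
    while (true) {
        for (final S3ObjectSummary summary : listing.getObjectSummaries()) {
            s3client.deleteObject(bucketName, summary.getKey());
        }
        if (!listing.isTruncated()) {
            break;
        }
        listing = s3client.listNextBatchOfObjects(listing);
    }
    s3client.deleteObject(bucketName, dirName);
}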

From source file:com.github.abhinavmishra14.aws.s3.service.impl.AwsS3IamServiceImpl.java

License:Open Source License

@Override
public void cleanAndDeleteBucket(final String bucketName) throws AmazonClientException, AmazonServiceException {
    LOGGER.info("cleanAndDeleteBucket invoked, bucketName: {}", bucketName);
    final List<S3ObjectSummary> listOfFiles = s3client.listObjects(bucketName).getObjectSummaries();
    for (final S3ObjectSummary eachFile : listOfFiles) {
        s3client.deleteObject(bucketName, eachFile.getKey());
    }
    s3client.deleteBucket(bucketName);
}

From source file:com.github.lbroudoux.elasticsearch.river.s3.connector.S3Connector.java

License:Apache License

/**
 * Selects and retrieves summaries of objects in the bucket, under the given path prefix,
 * that have a modification date more recent than lastScanTime.
 * @param lastScanTime Last modification date filter
 * @return Summaries of picked objects.
 */
public S3ObjectSummaries getObjectSummaries(String riverName, Long lastScanTime, String initialScanBookmark,
        boolean trackS3Deletions) {
    List<String> keys = new ArrayList<String>();
    List<S3ObjectSummary> result = new ArrayList<S3ObjectSummary>();
    boolean initialScan = initialScanBookmark != null;

    if (initialScan) {
        trackS3Deletions = false;
        logger.info("{}: resuming initial scan of {} from {}", riverName, pathPrefix, initialScanBookmark);
    } else {
        logger.info("{}: checking {} for changes since {}", riverName, pathPrefix, lastScanTime);
    }

    // Store the scan time to return before doing big queries...
    Long lastScanTimeToReturn = System.currentTimeMillis();

    if (lastScanTime == null || initialScan) {
        lastScanTime = 0L;
    }

    ListObjectsRequest request = new ListObjectsRequest().withBucketName(bucketName).withPrefix(pathPrefix)
            .withEncodingType("url");
    ObjectListing listing = s3Client.listObjects(request);
    //logger.debug("Listing: {}", listing);
    int keyCount = 0;
    boolean scanTruncated = false;
    String lastKey = null;

    while (!listing.getObjectSummaries().isEmpty() || listing.isTruncated()) {
        List<S3ObjectSummary> summaries = listing.getObjectSummaries();
        // if (logger.isDebugEnabled()) {
        //    logger.debug("Found {} items in this listObjects page", summaries.size());
        // }

        for (S3ObjectSummary summary : summaries) {
            if (logger.isDebugEnabled()) {
                // logger.debug("Getting {} last modified on {}", summary.getKey(), summary.getLastModified());
            }

            if (trackS3Deletions) {
                keys.add(summary.getKey());
            }

            if (summary.getLastModified().getTime() > lastScanTime
                    && result.size() < MAX_NEW_RESULTS_TO_INDEX_ON_RUN) {
                // logger.debug("  Picked !");

                if (!initialScan || initialScanBookmark.compareTo(summary.getKey()) < 0) {
                    logger.debug("  Picked {}", summary.getKey());
                    result.add(summary);
                    lastKey = summary.getKey();
                }

            } else if (!scanTruncated && result.size() == MAX_NEW_RESULTS_TO_INDEX_ON_RUN) {
                logger.info("{}: only indexing up to {} new objects on this indexing run", riverName,
                        MAX_NEW_RESULTS_TO_INDEX_ON_RUN);
                // initialScan = true;
                scanTruncated = true;

                if (!trackS3Deletions) {
                    // No need to keep iterating through all keys if we aren't doing deleteOnS3 
                    break;
                }
            }

            keyCount += 1;
        }

        if (initialScan && scanTruncated && !trackS3Deletions) {
            break;
        }

        listing = s3Client.listNextBatchOfObjects(listing);
    }

    // Wrap results and latest scan time.
    if (scanTruncated) {
        logger.info("{}: scan truncated for speed: {} files ({} new)", riverName, keyCount, result.size());
    } else {
        logger.info("{}: complete scan: {} files ({} new)", riverName, keyCount, result.size());
    }

    return new S3ObjectSummaries(lastScanTimeToReturn, lastKey, scanTruncated, trackS3Deletions, result, keys);
}

From source file:com.github.lbroudoux.elasticsearch.river.s3.connector.S3Connector.java

License:Apache License

public String getDecodedKey(S3ObjectSummary summary) {
    //return summary.getKey();  // If you deactivate using withEncodingType above
    try {
        return java.net.URLDecoder.decode(summary.getKey(), "UTF-8");
    } catch (java.io.UnsupportedEncodingException e) {
        e.printStackTrace();
        return null;
    }
}

From source file:com.github.lbroudoux.elasticsearch.river.s3.connector.S3Connector.java

License:Apache License

/**
 * Get the download url of this S3 object. May return null if the
 * object bucket and key cannot be converted to a URL.
 * @param summary An S3 object
 * @param feedDefinition The holder of S3 feed definition.
 * @return The resource url if possible (access is subject to AWS credential)
 */
public String getDownloadUrl(S3ObjectSummary summary, S3RiverFeedDefinition feedDefinition) {
    String resourceUrl = s3Client.getResourceUrl(summary.getBucketName(), summary.getKey());
    // If a download host (actually a vhost such as cloudfront offers) is specified, use it to
    // recreate a vhosted resource url. This is made by substitution of the generic host name in url. 
    if (resourceUrl != null && feedDefinition.getDownloadHost() != null) {
        int hostPosEnd = resourceUrl.indexOf("s3.amazonaws.com/") + "s3.amazonaws.com".length();
        String vhostResourceUrl = feedDefinition.getDownloadHost() + resourceUrl.substring(hostPosEnd);
        return vhostResourceUrl;
    }
    return resourceUrl;
}

From source file:com.github.wuic.nut.s3.S3NutDao.java

License:Open Source License

/**
 * <p>
 * Searches recursively in the given path any files matching the given entry.
 * </p>
 *
 * @param path the path
 * @param pattern the pattern to match
 * @return the list of matching files
 * @throws StreamException if the client can't move to a directory or any I/O error occurs
 */
private List<String> recursiveSearch(final String path, final Pattern pattern) throws StreamException {

    ObjectListing objectListing;

    try {
        final String finalSuffix = path.equals("") ? "" : "/";
        connect();
        objectListing = amazonS3Client.listObjects(new ListObjectsRequest().withBucketName(bucketName)
                .withPrefix(IOUtils.mergePath(path.substring(1), finalSuffix)).withDelimiter("/"));
    } catch (AmazonServiceException ase) {
        throw new StreamException(new IOException(
                String.format("Can't get S3Object on bucket %s for nut key : %s", bucketName, path), ase));
    }

    final List<String> retval = new ArrayList<String>();
    for (final S3ObjectSummary s3ObjectSummary : objectListing.getObjectSummaries()) {
        // Ignore directories, all nuts are in the listing
        if (!s3ObjectSummary.getKey().endsWith("/")) {
            final Matcher matcher = pattern.matcher(s3ObjectSummary.getKey());

            if (matcher.find()) {
                retval.add(s3ObjectSummary.getKey());
            }
        }
    }

    // Recursive search on prefixes (directories)
    for (final String s3CommonPrefix : objectListing.getCommonPrefixes()) {
        retval.addAll(recursiveSearch(s3CommonPrefix.substring(0, s3CommonPrefix.length() - 1), pattern));
    }

    return retval;
}

From source file:com.handywedge.binarystore.store.aws.BinaryStoreManagerImpl.java

License:MIT License

@Override
public List<BinaryInfo> list(StorageInfo storage, BinaryInfo binary) throws StoreException {
    logger.debug("={}", storage);
    logger.debug("?={}", binary);

    List<BinaryInfo> objInfoList = new ArrayList<BinaryInfo>();

    AmazonS3 s3client = getS3Client(binary.getBucketName());

    try {
        logger.debug("Listing binaries");
        final ListObjectsV2Request req = new ListObjectsV2Request().withBucketName(binary.getBucketName())
                .withMaxKeys(2);
        ListObjectsV2Result result;
        do {
            result = s3client.listObjectsV2(req);
            for (S3ObjectSummary binarySummary : result.getObjectSummaries()) {
                logger.debug(" - {}(size={})", binarySummary.getKey(), binarySummary.getSize());
                if (binarySummary.getSize() != 0) {
                    BinaryInfo objInfo = new BinaryInfo(binary.getBucketName());
                    objInfo.setFileName(binarySummary.getKey());
                    objInfo.setSize(binarySummary.getSize());
                    S3Object s3Object = s3client
                            .getObject(new GetObjectRequest(binary.getBucketName(), binarySummary.getKey()));
                    objInfo.setContentType(s3Object.getObjectMetadata().getContentType());
                    objInfo.setUrl(s3client.getUrl(binary.getBucketName(), binarySummary.getKey()).toString());

                    logger.debug("Generating pre-signed URL.");
                    URL presignedUrl = getPresignedUrl(s3client, binary.getBucketName(),
                            binarySummary.getKey());
                    objInfo.setPresignedUrl(presignedUrl.toString());
                    logger.debug("Pre-Signed URL = " + presignedUrl.toString());

                    objInfoList.add(objInfo);
                }
            }
            logger.debug("Next Continuation Token : " + result.getNextContinuationToken());
            req.setContinuationToken(result.getNextContinuationToken());
        } while (result.isTruncated());

    } catch (AmazonServiceException ase) {
        throw new StoreException(HttpStatus.SC_BAD_REQUEST, ErrorClassification.LIST_FAIL, ase,
                binary.getFileName());
    } catch (AmazonClientException ace) {
        throw new StoreException(HttpStatus.SC_BAD_REQUEST, ErrorClassification.LIST_FAIL, ace,
                binary.getFileName());
    }

    logger.info(" ={}", objInfoList.size());
    return objInfoList;
}

From source file:com.haskins.cloudtrailviewer.dialog.s3filechooser.S3FileList.java

License:Open Source License

private void addFileKeys(ObjectListing objectListing) {

    List<S3ObjectSummary> objectSummaries = objectListing.getObjectSummaries();
    for (final S3ObjectSummary objectSummary : objectSummaries) {

        String file = stripPrefix(objectSummary.getKey());

        S3ListModel model = new S3ListModel(file, file, S3ListModel.FILE_DOC);
        this.s3ListModel.addElement(model);
    }
}

From source file:com.haskins.cloudtrailviewer.dialog.s3filechooser.S3FileList.java

License:Open Source License

private void addFolderFiles(String path) {

    AmazonS3 s3Client = getS3Client();

    ObjectListing current = s3Client.listObjects(currentAccount.getBucket(), path);
    List<S3ObjectSummary> objectSummaries = current.getObjectSummaries();

    for (final S3ObjectSummary objectSummary : objectSummaries) {
        String file = objectSummary.getKey();
        selected_keys.add(file);
    }

    while (current.isTruncated()) {

        current = s3Client.listNextBatchOfObjects(current);
        objectSummaries = current.getObjectSummaries();

        for (final S3ObjectSummary objectSummary : objectSummaries) {
            String file = objectSummary.getKey();
            selected_keys.add(file);
        }
    }
}