Example usage for com.amazonaws.auth BasicAWSCredentials BasicAWSCredentials

List of usage examples for com.amazonaws.auth BasicAWSCredentials BasicAWSCredentials

Introduction

On this page you can find an example usage for com.amazonaws.auth BasicAWSCredentials BasicAWSCredentials.

Prototype

public BasicAWSCredentials(String accessKey, String secretKey) 

Source Link

Document

Constructs a new BasicAWSCredentials object, with the specified AWS access key and AWS secret key.

Usage

From source file:datameer.awstasks.aws.emr.AmazonElasticMapReduceCustomClient.java

License:Apache License

/**
 * Creates a client authenticated with the given AWS access key pair.
 * Simply wraps the raw keys in a {@link BasicAWSCredentials} and
 * delegates to the superclass constructor.
 *
 * @param awsAccessKeyId     AWS access key id
 * @param awsSecretAccessKey AWS secret access key
 */
public AmazonElasticMapReduceCustomClient(String awsAccessKeyId, String awsSecretAccessKey) {
    super(new BasicAWSCredentials(awsAccessKeyId, awsSecretAccessKey));
}

From source file:datameer.awstasks.aws.emr.EmrCluster.java

License:Apache License

/**
 * Uploads the given job jar to the configured S3 bucket, unless a jar with
 * the same target name is already cached there, and returns the s3n:// URI
 * under which the jar is reachable.
 *
 * Lazily creates the S3 client on first use from the configured access key
 * and secret.
 *
 * @param jobJar       local jar file to upload
 * @param s3JobJarName file name the jar should get on S3
 * @return s3n URI of the (possibly cached) job jar
 */
private String uploadingJobJar(File jobJar, String s3JobJarName) {
    if (_s3Service == null) {
        _s3Service = new AmazonS3Client(new BasicAWSCredentials(getSettings().getAccessKey(), _accessSecret));
    }
    // Serialize concurrent uploads of the same local jar within this JVM.
    synchronized (jobJar.getAbsolutePath().intern()) {
        String s3JobJarPath = new File(getSettings().getS3JobJarBasePath(), s3JobJarName).getPath();
        String s3Bucket = getSettings().getS3Bucket();
        if (!_s3Service.doesBucketExist(s3Bucket)) {
            // Fixed grammar of the error message ("does not exists").
            throw new IllegalStateException("s3 bucket '" + s3Bucket + "' does not exist");
        }
        if (!S3Util.existsFile(_s3Service, s3Bucket, s3JobJarPath)) {
            LOG.info("uploading " + jobJar + " to " + s3JobJarPath);
            S3Util.uploadFile(_s3Service, s3Bucket, jobJar, s3JobJarPath);
        } else {
            LOG.info("using cached job-jar: " + s3JobJarPath);
        }
        return "s3n://" + getSettings().getAccessKey() + "@" + s3Bucket + s3JobJarPath;
    }
}

From source file:datameer.awstasks.util.Ec2Configuration.java

License:Apache License

/**
 * Creates a new S3 client authenticated with the configured access key id
 * and secret.
 *
 * @return a freshly constructed {@link AmazonS3} client
 */
public AmazonS3 createS3Service() {
    return new AmazonS3Client(new BasicAWSCredentials(_accessKeyId, _accessKeySecret));
}

From source file:de.taimos.pipeline.aws.AWSClientFactory.java

License:Apache License

/**
 * Builds a static credentials provider from the AWS environment variables.
 *
 * Uses session credentials when a session token is present, plain basic
 * credentials otherwise. Returns null when no complete access key pair is
 * configured in the environment.
 *
 * @param vars environment variables to read the AWS settings from
 * @return a static provider, or null if access key or secret is missing
 */
private static AWSCredentialsProvider handleStaticCredentials(EnvVars vars) {
    final String keyId = vars.get(AWS_ACCESS_KEY_ID);
    final String secret = vars.get(AWS_SECRET_ACCESS_KEY);
    if (keyId == null || secret == null) {
        return null;
    }
    final String token = vars.get(AWS_SESSION_TOKEN);
    if (token == null) {
        return new AWSStaticCredentialsProvider(new BasicAWSCredentials(keyId, secret));
    }
    return new AWSStaticCredentialsProvider(new BasicSessionCredentials(keyId, secret, token));
}

From source file:dynamok.sink.DynamoDbSinkTask.java

License:Apache License

/**
 * Initializes the sink task: builds the DynamoDB client from the connector
 * configuration — explicit credentials when both key settings are non-empty,
 * otherwise the SDK's default credential chain — then targets the configured
 * region and resets the retry budget.
 *
 * @param props raw connector properties
 */
@Override
public void start(Map<String, String> props) {
    config = new ConnectorConfig(props);

    final String accessKey = config.accessKeyId.value();
    final String secretKey = config.secretKeyId.value();
    if (!accessKey.isEmpty() && !secretKey.isEmpty()) {
        client = new AmazonDynamoDBClient(new BasicAWSCredentials(accessKey, secretKey));
        log.debug("AmazonDynamoDBClient created with AWS credentials from connector configuration");
    } else {
        client = new AmazonDynamoDBClient();
        log.debug("AmazonDynamoDBClient created with default credentials");
    }

    client.configureRegion(config.region);
    remainingRetries = config.maxRetries;
}

From source file:dynamok.source.DynamoDbSourceConnector.java

License:Apache License

/**
 * Starts the connector: discovers all DynamoDB tables matching the
 * configured filter, validates that each has a stream with a usable view
 * type, and records every shard of those streams for later task assignment.
 *
 * @param props raw connector properties
 * @throws ConnectException if a selected table has no stream enabled or its
 *         stream view type does not include new images
 */
@Override
public void start(Map<String, String> props) {
    config = new ConnectorConfig(props);
    streamShards = new HashMap<>();

    final AmazonDynamoDBClient client;
    final AmazonDynamoDBStreamsClient streamsClient;

    // Use explicit credentials only when both key settings are non-empty;
    // otherwise fall back to the SDK's default credential chain.
    if (config.accessKeyId.value().isEmpty() || config.secretKeyId.value().isEmpty()) {
        client = new AmazonDynamoDBClient();
        streamsClient = new AmazonDynamoDBStreamsClient();
        log.debug("AmazonDynamoDB clients created with default credentials");
    } else {
        BasicAWSCredentials awsCreds = new BasicAWSCredentials(config.accessKeyId.value(),
                config.secretKeyId.value());
        client = new AmazonDynamoDBClient(awsCreds);
        streamsClient = new AmazonDynamoDBStreamsClient(awsCreds);
        log.debug("AmazonDynamoDB clients created with AWS credentials from connector configuration");
    }

    client.configureRegion(config.region);
    streamsClient.configureRegion(config.region);

    final Set<String> ignoredTables = new HashSet<>();
    final Set<String> consumeTables = new HashSet<>();

    // listTables is paginated; loop until no continuation marker is returned.
    String lastEvaluatedTableName = null;
    do {
        final ListTablesResult listResult = client.listTables(lastEvaluatedTableName);

        for (String tableName : listResult.getTableNames()) {
            if (!acceptTable(tableName)) {
                ignoredTables.add(tableName);
                continue;
            }

            final TableDescription tableDesc = client.describeTable(tableName).getTable();

            final StreamSpecification streamSpec = tableDesc.getStreamSpecification();

            // Every ingested table must have streams enabled, otherwise there
            // is nothing to consume from.
            if (streamSpec == null || !streamSpec.isStreamEnabled()) {
                throw new ConnectException(
                        String.format("DynamoDB table `%s` does not have streams enabled", tableName));
            }

            // Only view types that carry the new item image are usable.
            final String streamViewType = streamSpec.getStreamViewType();
            if (!streamViewType.equals(StreamViewType.NEW_IMAGE.name())
                    && !streamViewType.equals(StreamViewType.NEW_AND_OLD_IMAGES.name())) {
                throw new ConnectException(String.format("DynamoDB stream view type for table `%s` is %s",
                        tableName, streamViewType));
            }

            final DescribeStreamResult describeStreamResult = streamsClient
                    .describeStream(new DescribeStreamRequest().withStreamArn(tableDesc.getLatestStreamArn()));

            // Remember which table each shard belongs to for task assignment.
            for (Shard shard : describeStreamResult.getStreamDescription().getShards()) {
                streamShards.put(shard, tableDesc);
            }

            consumeTables.add(tableName);
        }

        lastEvaluatedTableName = listResult.getLastEvaluatedTableName();
    } while (lastEvaluatedTableName != null);

    log.info("Tables to ignore: {}", ignoredTables);
    log.info("Tables to ingest: {}", consumeTables);

    // The clients are only needed for discovery; tasks create their own.
    client.shutdown();
    streamsClient.shutdown();
}

From source file:dynamok.source.DynamoDbSourceTask.java

License:Apache License

/**
 * Initializes the source task: creates the DynamoDB Streams client from the
 * task configuration — explicit credentials when both key settings are
 * non-empty, otherwise the SDK's default credential chain — then targets the
 * configured region and prepares the shard bookkeeping.
 *
 * @param props raw task properties
 */
@Override
public void start(Map<String, String> props) {
    config = new TaskConfig(props);

    final String accessKey = config.accessKeyId.toString();
    final String secretKey = config.secretKeyId.toString();
    if (!accessKey.isEmpty() && !secretKey.isEmpty()) {
        streamsClient = new AmazonDynamoDBStreamsClient(new BasicAWSCredentials(accessKey, secretKey));
        log.debug("AmazonDynamoDBStreamsClient created with AWS credentials from connector configuration");
    } else {
        streamsClient = new AmazonDynamoDBStreamsClient();
        log.debug("AmazonDynamoDBStreamsClient created with default credentials");
    }

    streamsClient.configureRegion(config.region);

    assignedShards = new ArrayList<>(config.shards);
    shardIterators = new HashMap<>(assignedShards.size());
    currentShardIdx = 0;
}

From source file:ecplugins.s3.S3Util.java

License:Apache License

/**
 * This procedure deletes the bucket along with its contents
 * @param bucketName/*from   w w w . j  a  v  a 2 s  .com*/
 * @return
 * @throws Exception
 */
public static boolean DeleteBucket(String bucketName) throws Exception {

    Properties props = TestUtils.getProperties();

    BasicAWSCredentials credentials = new BasicAWSCredentials(props.getProperty(StringConstants.ACCESS_ID),
            props.getProperty(StringConstants.SECRET_ACCESS_ID));

    // Create TransferManager
    TransferManager tx = new TransferManager(credentials);

    // Get S3 Client
    AmazonS3 s3 = tx.getAmazonS3Client();

    if (s3.doesBucketExist(bucketName)) {
        // Multi-object delete by specifying only keys (no version ID).
        DeleteObjectsRequest multiObjectDeleteRequest = new DeleteObjectsRequest(bucketName).withQuiet(false);

        //get keys
        List<String> keys = new ArrayList<String>();
        ObjectListing objectListing = s3.listObjects(new ListObjectsRequest().withBucketName(bucketName));
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            keys.add(objectSummary.getKey());
        }

        // Create request that include only object key names.
        List<DeleteObjectsRequest.KeyVersion> justKeys = new ArrayList<DeleteObjectsRequest.KeyVersion>();
        for (String key : keys) {
            justKeys.add(new DeleteObjectsRequest.KeyVersion(key));
        }

        if (justKeys.size() == 0) {
            return false;
        }

        multiObjectDeleteRequest.setKeys(justKeys);
        // Execute DeleteObjects - Amazon S3 add delete marker for each object
        // deletion. The objects no disappear from your bucket (verify).
        DeleteObjectsResult delObjRes = null;

        delObjRes = s3.deleteObjects(multiObjectDeleteRequest);

        s3.deleteBucket(bucketName);
        return true;
    } else {
        System.out.println("Error: Bucket with name " + bucketName + " does not exists.");
        return false;
    }
}

From source file:ecplugins.s3.S3Util.java

License:Apache License

/**
 * Creates an S3 bucket with the given name, authenticating with the access
 * credentials from the test properties.
 *
 * @param bucketName name of the bucket to create
 * @throws Exception if the test properties cannot be loaded
 */
public static void CreateBucket(String bucketName) throws Exception {

    Properties properties = TestUtils.getProperties();

    BasicAWSCredentials awsCredentials = new BasicAWSCredentials(
            properties.getProperty(StringConstants.ACCESS_ID),
            properties.getProperty(StringConstants.SECRET_ACCESS_ID));

    // Obtain the S3 client via a TransferManager, like the other helpers.
    AmazonS3 s3Client = new TransferManager(awsCredentials).getAmazonS3Client();

    s3Client.createBucket(bucketName);
}

From source file:ecplugins.s3.S3Util.java

License:Apache License

/**
 * Checks whether a bucket with the given name already exists, authenticating
 * with the access credentials from the test properties.
 *
 * @param bucketName name of the bucket to probe
 * @return true if the bucket exists, false otherwise
 * @throws Exception if the test properties cannot be loaded
 */
public static boolean CheckIsBucketAvailable(String bucketName) throws Exception {

    Properties properties = TestUtils.getProperties();

    BasicAWSCredentials awsCredentials = new BasicAWSCredentials(
            properties.getProperty(StringConstants.ACCESS_ID),
            properties.getProperty(StringConstants.SECRET_ACCESS_ID));

    // Obtain the S3 client via a TransferManager, like the other helpers.
    AmazonS3 s3Client = new TransferManager(awsCredentials).getAmazonS3Client();

    return s3Client.doesBucketExist(bucketName);
}