Example usage for com.amazonaws.services.s3 AmazonS3ClientBuilder standard

List of usage examples for com.amazonaws.services.s3 AmazonS3ClientBuilder standard

Introduction

This page collects example usages of com.amazonaws.services.s3.AmazonS3ClientBuilder.standard().

Prototype

public static AmazonS3ClientBuilder standard() 
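
A minimal, self-contained sketch of typical usage. The region, access key, and secret key below are placeholders for illustration, not values taken from the examples that follow.

import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

public class S3ClientExample {
    public static void main(String[] args) {
        // Placeholder credentials for illustration only; prefer a credentials
        // provider chain or named profile over hard-coded keys.
        BasicAWSCredentials creds = new BasicAWSCredentials("ACCESS_KEY", "SECRET_KEY");

        // standard() returns a builder preconfigured with the SDK's default settings.
        AmazonS3 s3 = AmazonS3ClientBuilder.standard()
                .withCredentials(new AWSStaticCredentialsProvider(creds))
                .withRegion(Regions.US_EAST_1)
                .build();

        // List the buckets visible to the supplied credentials.
        s3.listBuckets().forEach(bucket -> System.out.println(bucket.getName()));
    }
}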

Usage

From source file: nl.nn.adapterframework.filesystem.AmazonS3FileSystem.java

License: Apache License

@Override
public void open() {
    CredentialFactory cf = new CredentialFactory(getAuthAlias(), getAccessKey(), getSecretKey());
    BasicAWSCredentials awsCreds = new BasicAWSCredentials(cf.getUsername(), cf.getPassword());
    AmazonS3ClientBuilder s3ClientBuilder = AmazonS3ClientBuilder.standard()
            .withChunkedEncodingDisabled(isChunkedEncodingDisabled())
            .withForceGlobalBucketAccessEnabled(isForceGlobalBucketAccessEnabled())
            .withRegion(getClientRegion()).withCredentials(new AWSStaticCredentialsProvider(awsCreds));
    s3Client = s3ClientBuilder.build();
}

From source file: org.apache.beam.sdk.io.aws.s3.DefaultS3ClientBuilderFactory.java

License: Apache License

@Override
public AmazonS3ClientBuilder createBuilder(S3Options s3Options) {
    AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard()
            .withCredentials(s3Options.getAwsCredentialsProvider());

    if (s3Options.getClientConfiguration() != null) {
        builder = builder.withClientConfiguration(s3Options.getClientConfiguration());
    }

    if (!Strings.isNullOrEmpty(s3Options.getAwsServiceEndpoint())) {
        builder = builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
                s3Options.getAwsServiceEndpoint(), s3Options.getAwsRegion()));
    } else if (!Strings.isNullOrEmpty(s3Options.getAwsRegion())) {
        builder = builder.withRegion(s3Options.getAwsRegion());
    } else {
        LOG.info("The AWS S3 Beam extension was included in this build, but the awsRegion flag "
                + "was not specified. If you don't plan to use S3, then ignore this message.");
    }
    return builder;
}

From source file: org.apache.heron.uploader.s3.S3Uploader.java

License: Apache License

@Override
public void initialize(Config config) {
    bucket = S3Context.bucket(config);
    String accessKey = S3Context.accessKey(config);
    String accessSecret = S3Context.secretKey(config);
    String awsProfile = S3Context.awsProfile(config);
    String proxy = S3Context.proxyUri(config);
    String endpoint = S3Context.uri(config);
    String customRegion = S3Context.region(config);
    AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard();

    if (Strings.isNullOrEmpty(bucket)) {
        throw new RuntimeException("Missing heron.uploader.s3.bucket config value");
    }

    // If an accessKey is specified, use it. Otherwise, check whether an AWS profile
    // is specified. If neither is set, fall back to the DefaultAWSCredentialsProviderChain
    // by not specifying a CredentialsProvider.
    if (!Strings.isNullOrEmpty(accessKey) || !Strings.isNullOrEmpty(accessSecret)) {

        if (!Strings.isNullOrEmpty(awsProfile)) {
            throw new RuntimeException("Please provide access_key/secret_key " + "or aws_profile, not both.");
        }

        if (Strings.isNullOrEmpty(accessKey)) {
            throw new RuntimeException("Missing heron.uploader.s3.access_key config value");
        }

        if (Strings.isNullOrEmpty(accessSecret)) {
            throw new RuntimeException("Missing heron.uploader.s3.secret_key config value");
        }
        builder.setCredentials(
                new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKey, accessSecret)));
    } else if (!Strings.isNullOrEmpty(awsProfile)) {
        builder.setCredentials(new ProfileCredentialsProvider(awsProfile));
    }

    if (!Strings.isNullOrEmpty(proxy)) {
        URI proxyUri;

        try {
            proxyUri = new URI(proxy);
        } catch (URISyntaxException e) {
            throw new RuntimeException("Invalid heron.uploader.s3.proxy_uri config value: " + proxy, e);
        }

        ClientConfiguration clientCfg = new ClientConfiguration();
        clientCfg.withProtocol(Protocol.HTTPS).withProxyHost(proxyUri.getHost())
                .withProxyPort(proxyUri.getPort());

        if (!Strings.isNullOrEmpty(proxyUri.getUserInfo())) {
            String[] info = proxyUri.getUserInfo().split(":", 2);
            clientCfg.setProxyUsername(info[0]);
            if (info.length > 1) {
                clientCfg.setProxyPassword(info[1]);
            }
        }

        builder.setClientConfiguration(clientCfg);
    }

    s3Client = builder.withRegion(customRegion).withPathStyleAccessEnabled(true)
            .withChunkedEncodingDisabled(true).withPayloadSigningEnabled(true).build();

    if (!Strings.isNullOrEmpty(endpoint)) {
        s3Client.setEndpoint(endpoint);
    }

    final String topologyName = Context.topologyName(config);
    final String topologyPackageLocation = Context.topologyPackageFile(config);

    pathPrefix = S3Context.pathPrefix(config);
    packageFileHandler = new File(topologyPackageLocation);

    // The path the packaged topology will be uploaded to
    remoteFilePath = generateS3Path(pathPrefix, topologyName, packageFileHandler.getName());

    // Generate the location of the backup file in case we need to revert the deploy
    previousVersionFilePath = generateS3Path(pathPrefix, topologyName,
            "previous_" + packageFileHandler.getName());
}
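
As the comment in the Heron uploader above notes, leaving the credentials provider unset makes the SDK fall back to the DefaultAWSCredentialsProviderChain (environment variables, system properties, profile files, container or instance roles). A minimal sketch of that fallback, assuming a hypothetical bucket name and an illustrative region:

// No explicit credentials: the builder resolves them through the
// DefaultAWSCredentialsProviderChain when the client is used.
AmazonS3 s3 = AmazonS3ClientBuilder.standard()
        .withRegion(Regions.US_WEST_2) // illustrative region
        .build();

// "example-bucket" is a hypothetical name used only for illustration.
boolean exists = s3.doesBucketExistV2("example-bucket");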

From source file: org.codice.ddf.catalog.plugin.metacard.backup.storage.s3storage.MetacardS3StorageRoute.java

License: Open Source License

private AmazonS3 getS3Client() {
    AwsClientBuilder.EndpointConfiguration endpointConfiguration = new AwsClientBuilder.EndpointConfiguration(
            s3Endpoint, getS3Region());
    if (StringUtils.isNotBlank(s3AccessKey)) {
        AWSCredentials awsCredentials = new BasicAWSCredentials(s3AccessKey, s3SecretKey);
        AWSCredentialsProvider credentialsProvider = new AWSStaticCredentialsProvider(awsCredentials);
        return AmazonS3ClientBuilder.standard().withCredentials(credentialsProvider)
                .withEndpointConfiguration(endpointConfiguration).build();
    }
    return AmazonS3ClientBuilder.standard().withEndpointConfiguration(endpointConfiguration).build();
}

From source file: org.deeplearning4j.legacyExamples.EmrSparkExample.java

License: Apache License

public void entryPoint(String[] args) {
    JCommander jcmdr = new JCommander(this);
    try {
        jcmdr.parse(args);
    } catch (ParameterException e) {
        jcmdr.usage();
        try {
            Thread.sleep(500);
        } catch (Exception e2) {
        }
        throw e;
    }

    AmazonElasticMapReduceClientBuilder builder = AmazonElasticMapReduceClientBuilder.standard();
    builder.withRegion(region);
    builder.withCredentials(getCredentialsProvider());

    AmazonElasticMapReduce emr = builder.build();

    List<StepConfig> steps = new ArrayList<>();

    if (upload) {
        log.info("uploading uber jar");

        AmazonS3ClientBuilder s3builder = AmazonS3ClientBuilder.standard();
        s3builder.withRegion(region);
        s3builder.withCredentials(getCredentialsProvider());
        AmazonS3 s3Client = s3builder.build();

        if (!s3Client.doesBucketExist(bucketName)) {
            s3Client.createBucket(bucketName);
        }

        File uberJarFile = new File(uberJar);

        s3Client.putObject(new PutObjectRequest(bucketName, uberJarFile.getName(), uberJarFile));
    }

    if (debug) {
        log.info("enable debug");

        StepFactory stepFactory = new StepFactory(builder.getRegion() + ".elasticmapreduce");
        StepConfig enableDebugging = new StepConfig().withName("Enable Debugging")
                .withActionOnFailure(ActionOnFailure.TERMINATE_JOB_FLOW)
                .withHadoopJarStep(stepFactory.newEnableDebuggingStep());
        steps.add(enableDebugging);
    }

    if (execute) {
        log.info("execute spark step");

        HadoopJarStepConfig sparkStepConf = new HadoopJarStepConfig();
        sparkStepConf.withJar("command-runner.jar");
        sparkStepConf.withArgs("spark-submit", "--deploy-mode", "cluster", "--class", className,
                getS3UberJarUrl(), "-useSparkLocal", "false");

        ActionOnFailure action = ActionOnFailure.TERMINATE_JOB_FLOW;

        if (keepAlive) {
            action = ActionOnFailure.CONTINUE;
        }

        StepConfig sparkStep = new StepConfig().withName("Spark Step").withActionOnFailure(action)
                .withHadoopJarStep(sparkStepConf);
        steps.add(sparkStep);
    }

    log.info("create spark cluster");

    Application sparkApp = new Application().withName("Spark");

    // The service role and job flow role are created automatically when a cluster is
    // launched from the AWS console, so either do that first or create them manually.

    RunJobFlowRequest request = new RunJobFlowRequest().withName("Spark Cluster").withSteps(steps)
            .withServiceRole("EMR_DefaultRole").withJobFlowRole("EMR_EC2_DefaultRole")
            .withApplications(sparkApp).withReleaseLabel(emrVersion).withLogUri(getS3BucketLogsUrl())
            .withInstances(new JobFlowInstancesConfig().withEc2KeyName("spark").withInstanceCount(instanceCount)
                    .withKeepJobFlowAliveWhenNoSteps(keepAlive).withMasterInstanceType(instanceType)
                    .withSlaveInstanceType(instanceType));

    RunJobFlowResult result = emr.runJobFlow(request);

    log.info(result.toString());

    log.info("done");
}

From source file: org.duracloud.audit.reader.impl.AuditLogReaderImpl.java

License: Apache License

protected StorageProvider getStorageProvider() {
    AWSCredentials creds = new DefaultAWSCredentialsProviderChain().getCredentials();
    AmazonS3 s3client = AmazonS3ClientBuilder.standard().build();
    return new S3StorageProvider(s3client, creds.getAWSAccessKeyId(), null);
}

From source file: org.duracloud.common.rest.spring.XmlWebApplicationContext.java

License: Apache License

@Override
protected Resource getResourceByPath(String path) {
    if (path.startsWith("s3://")) {
        AmazonS3 client = AmazonS3ClientBuilder.standard().build();
        AmazonS3URI s3Uri = new AmazonS3URI(path);
        S3Object s3Obj = client.getObject(new GetObjectRequest(s3Uri.getBucket(), s3Uri.getKey()));
        s3Obj.getObjectContent();

        return new InputStreamResource(s3Obj.getObjectContent());
    }
    return super.getResourceByPath(path);
}

From source file: org.duracloud.integration.durastore.storage.probe.ProbedRestS3Client.java

License: Apache License

public ProbedRestS3Client(BasicAWSCredentials credentials) throws AmazonServiceException {
    this.s3Client = AmazonS3ClientBuilder.standard()
            .withCredentials(new AWSStaticCredentialsProvider(credentials)).build();
}

From source file: org.duracloud.s3storage.S3ProviderUtil.java

License: Apache License

private static AmazonS3 newS3Client(String accessKey, String secretKey, Region region) {
    BasicAWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
    try {
        String awsRegion = null;
        if (region != null) {
            awsRegion = region.getName();
        } else {
            awsRegion = System.getProperty(AWS_REGION.name());
        }
        AmazonS3 s3Client = AmazonS3ClientBuilder.standard()
                .withCredentials(new AWSStaticCredentialsProvider(awsCredentials)).withRegion(awsRegion)
                .build();
        return s3Client;
    } catch (AmazonServiceException e) {
        String err = "Could not create connection to Amazon S3 due " + "to error: " + e.getMessage();
        throw new StorageException(err, e, RETRY);
    }
}

From source file: org.duracloud.s3storage.S3ProviderUtil.java

License: Apache License

/**
 * @param s3Url using the s3://bucket/object syntax.
 * @return a Resource wrapping the S3 object's content stream
 * @throws IOException
 */
public static Resource getS3ObjectByUrl(String s3Url) throws IOException {
    AmazonS3 client = AmazonS3ClientBuilder.standard().build();
    AmazonS3URI s3Uri = new AmazonS3URI(s3Url);
    S3Object s3Obj = client.getObject(new GetObjectRequest(s3Uri.getBucket(), s3Uri.getKey()));
    s3Obj.getObjectContent();
    Resource resource = new InputStreamResource(s3Obj.getObjectContent());
    return resource;
}
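
A usage sketch for the helper above, assuming a hypothetical bucket and key; with no region configured on the builder, the client resolves one from the environment (AWS_REGION, profile configuration, or instance metadata):

// "my-bucket" and "reports/summary.xml" are hypothetical values used only for illustration.
Resource resource = S3ProviderUtil.getS3ObjectByUrl("s3://my-bucket/reports/summary.xml");
try (InputStream in = resource.getInputStream()) {
    // Copy the object's contents to a local file.
    Files.copy(in, Paths.get("summary.xml"), StandardCopyOption.REPLACE_EXISTING);
}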