Example usage for com.amazonaws.services.s3.model PutObjectRequest PutObjectRequest

List of usage examples for com.amazonaws.services.s3.model PutObjectRequest PutObjectRequest

Introduction

In this page you can find the example usage for com.amazonaws.services.s3.model PutObjectRequest PutObjectRequest.

Prototype

public PutObjectRequest(String bucketName, String key, String redirectLocation) 

Source Link

Document

Constructs a new PutObjectRequest object to perform a redirect for the specified bucket and key.

Usage

From source file:iit.edu.supadyay.s3.S3upload.java

/**
 * Uploads a local file to S3 under the given key, creating the bucket if needed,
 * and marks the object world read/write.
 *
 * @param bucketName     bucket to create/use
 * @param uploadFileName path of the local file to upload
 * @param keyName        destination object key
 * @return true on success, false if the AWS SDK reported an error
 * @throws IOException declared for callers; not thrown by the visible code
 * @throws InterruptedException declared for callers; not thrown by the visible code
 */
public static boolean upload(String bucketName, String uploadFileName, String keyName)
        throws IOException, InterruptedException {

    // SECURITY: hard-coded access key/secret previously committed here in comments
    // have been removed — those credentials must be considered leaked and rotated.
    // Instance-profile credentials are resolved from the EC2 instance metadata.
    AmazonS3 s3client = new AmazonS3Client(new InstanceProfileCredentialsProvider());
    try {
        System.out.println("Uploading a new object to S3 from a file\n");
        File file = new File(uploadFileName);
        // createBucket succeeds if the bucket already exists and is owned by this account.
        s3client.createBucket(bucketName);

        s3client.putObject(new PutObjectRequest(bucketName, keyName, file));
        // WARNING: PublicReadWrite allows ANYONE to overwrite or delete this object.
        // NOTE(review): PublicRead (or a private object + presigned URLs) is almost
        // certainly what was intended — confirm before tightening.
        s3client.setObjectAcl(bucketName, keyName, CannedAccessControlList.PublicReadWrite);

    } catch (AmazonServiceException ase) {
        // Request reached S3 but was rejected (bad ACL, missing permissions, ...).
        System.out.println("Caught an AmazonServiceException, which " + "means your request made it "
                + "to Amazon S3, but was rejected with an error response" + " for some reason.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
        return false;
    } catch (AmazonClientException ace) {
        // Client-side failure (e.g. no network); the request never reached S3.
        System.out.println("Caught an AmazonClientException, which " + "means the client encountered "
                + "an internal error while trying to " + "communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
        return false;
    }
    return true;
}

From source file:io.dockstore.common.FileProvisioning.java

License:Apache License

/**
 * Copies a produced output file to its provisioned destination.
 *
 * If the destination URL is an s3:// path, the file is uploaded with the
 * configured S3 client while a progress listener prints transfer progress.
 * Otherwise the copy is delegated to Commons VFS, which resolves both the
 * destination URL and the local source path.
 *
 * @param file          destination descriptor; only getUrl() is read here
 * @param cwlOutputPath local path of the file to provision
 */
public void provisionOutputFile(FileInfo file, String cwlOutputPath) {
    File sourceFile = new File(cwlOutputPath);
    long inputSize = sourceFile.length();
    if (file.getUrl().startsWith("s3://")) {
        AmazonS3 s3Client = FileProvisioning.getAmazonS3Client(config);
        // Strip the scheme; the first path segment is the bucket, the rest is the key.
        String trimmedPath = file.getUrl().replace("s3://", "");
        List<String> splitPathList = Lists.newArrayList(trimmedPath.split("/"));
        String bucketName = splitPathList.remove(0);

        PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, Joiner.on("/").join(splitPathList),
                sourceFile);
        putObjectRequest.setGeneralProgressListener(new ProgressListener() {
            ProgressPrinter printer = new ProgressPrinter();
            long runningTotal = 0;

            @Override
            public void progressChanged(ProgressEvent progressEvent) {
                // Accumulate only byte-transfer events; other event types carry no payload size.
                if (progressEvent.getEventType() == ProgressEventType.REQUEST_BYTE_TRANSFER_EVENT) {
                    runningTotal += progressEvent.getBytesTransferred();
                }
                printer.handleProgress(runningTotal, inputSize);
            }
        });
        try {
            s3Client.putObject(putObjectRequest);
        } finally {
            // Terminate the progress line regardless of upload outcome.
            System.out.println();
        }
    } else {
        try {
            FileSystemManager fsManager;
            // trigger a copy from the URL to a local file path that's a UUID to avoid collision
            fsManager = VFS.getManager();
            // check for a local file path
            FileObject dest = fsManager.resolveFile(file.getUrl());
            FileObject src = fsManager.resolveFile(sourceFile.getAbsolutePath());
            copyFromInputStreamToOutputStream(src.getContent().getInputStream(), inputSize,
                    dest.getContent().getOutputStream());
        } catch (IOException e) {
            throw new RuntimeException("Could not provision output files", e);
        }
    }
}

From source file:io.druid.storage.s3.S3DataSegmentPusher.java

License:Apache License

/**
 * Pushes {@code file} to the given bucket/key, unless the key already exists
 * and {@code replaceExisting} is false. Applies the bucket-owner ACL when ACLs
 * are not disabled in the config.
 */
private void uploadFileIfPossible(AmazonS3 s3Client, String bucket, String key, File file,
        boolean replaceExisting) {
    // Guard clause: an existing key is left untouched when replacement is not requested.
    if (!replaceExisting && S3Utils.isObjectInBucketIgnoringPermission(s3Client, bucket, key)) {
        log.info("Skipping push because key [%s] exists && replaceExisting == false", key);
        return;
    }

    final PutObjectRequest request = new PutObjectRequest(bucket, key, file);
    if (!config.getDisableAcl()) {
        request.setAccessControlList(S3Utils.grantFullControlToBucketOwner(s3Client, bucket));
    }
    log.info("Pushing [%s] to bucket[%s] and key[%s].", file, bucket, key);
    s3Client.putObject(request);
}

From source file:io.druid.storage.s3.ServerSideEncryptingAmazonS3.java

License:Apache License

/** Convenience overload: wraps the arguments in a PutObjectRequest and delegates. */
public PutObjectResult putObject(String bucket, String key, File file) {
    final PutObjectRequest request = new PutObjectRequest(bucket, key, file);
    return putObject(request);
}

From source file:io.fastup.maven.plugin.app.DeployPutRequestMaker.java

License:Open Source License

/**
 * Builds the S3 PutObjectRequest that deploys the project's war artifact.
 *
 * The object key is deployables/{groupId}/{artifactId}/{version}/{warFileName},
 * and deployment parameters (health-check URL, DB settings, context config, ...)
 * are attached as S3 user metadata.
 *
 * @return a fully-populated request ready for the S3 client
 * @throws MojoExecutionException if the context-config URL template is malformed
 */
PutObjectRequest makePutRequest() throws MojoExecutionException {
    final TvaritEnvironment tvaritEnvironment = TvaritEnvironment.getInstance();
    // NOTE: a previous revision called getMojo().getArtifactBucketName() here and
    // discarded the result — that dead statement has been removed.
    final File warFile = tvaritEnvironment.getMavenProject().getArtifact().getFile();
    String projectArtifactId = tvaritEnvironment.getMavenProject().getArtifactId();
    String projectVersion = tvaritEnvironment.getMavenProject().getVersion();
    final String projectGroupId = tvaritEnvironment.getMavenProject().getGroupId();
    final String key = "deployables/" + projectGroupId + "/" + projectArtifactId + "/" + projectVersion + "/"
            + warFile.getName();

    final String bucketName = tvaritEnvironment.getArtifactBucketName();
    final PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, key, warFile);
    final ObjectMetadata metadata = new ObjectMetadata();
    // NOTE(review): key naming mixes snake_case and kebab-case; kept as-is because
    // downstream consumers read these exact keys.
    final Map<String, String> userMetadata = new HashMap<>();
    userMetadata.put("project_name", tvaritEnvironment.getProjectName());
    userMetadata.put("health_check_url", tvaritEnvironment.<AppDeployerMojo>getMojo().getHealthCheckUrl());
    userMetadata.put("private_key_name", tvaritEnvironment.<AppDeployerMojo>getMojo().getSshKeyName());
    userMetadata.put("db-version", tvaritEnvironment.<AppDeployerMojo>getMojo().getDbVersion());
    userMetadata.put("group-id", tvaritEnvironment.getMavenProject().getGroupId());
    userMetadata.put("artifact-id", tvaritEnvironment.getMavenProject().getArtifactId());
    userMetadata.put("version", tvaritEnvironment.getMavenProject().getVersion());
    userMetadata.put("app_fqdn", tvaritEnvironment.<AppDeployerMojo>getMojo().getAppFqdn());
    userMetadata.put("db-name", tvaritEnvironment.<AppDeployerMojo>getMojo().getDbName());
    userMetadata.put("db-username", tvaritEnvironment.<AppDeployerMojo>getMojo().getDbUsername());
    userMetadata.put("db-password", tvaritEnvironment.<AppDeployerMojo>getMojo().getDbPassword());
    final String contextConfigUrl = tvaritEnvironment.<AppDeployerMojo>getMojo().getContextConfigUrl();
    final URL url;
    try {
        url = new TemplateUrlMaker().makeUrl(contextConfigUrl);
    } catch (MalformedURLException e) {
        throw new MojoExecutionException("failed", e);
    }
    userMetadata.put("context_config_url", url.toString());
    final String contextRoot = tvaritEnvironment.<AppDeployerMojo>getMojo().getContextRoot();
    // "/" maps to the conventional ROOT context name.
    userMetadata.put("context_root", contextRoot.equals("/") ? "ROOT" : contextRoot);
    metadata.setUserMetadata(userMetadata);
    putObjectRequest.withMetadata(metadata);
    return putObjectRequest;
}

From source file:io.ingenieux.lambda.shell.LambdaShell.java

License:Apache License

/**
 * Uploads a local file to the S3 location named by an s3://bucket/key URI.
 *
 * @param sourceFile local file path to upload
 * @param targetPath destination as an S3 URI string
 */
private void copyFile(String sourceFile, String targetPath) {
    // Parse bucket and key out of the target URI, then push the file.
    final AmazonS3URI targetUri = new AmazonS3URI(targetPath);
    final AmazonS3 s3Client = new AmazonS3Client();
    s3Client.putObject(new PutObjectRequest(targetUri.getBucket(), targetUri.getKey(), new File(sourceFile)));
}

From source file:io.minio.awssdk.tests.S3TestUtils.java

License:Apache License

/**
 * Uploads the file at {@code filePath} to bucket/key, optionally encrypting
 * it with a customer-provided SSE-C key.
 *
 * @param sseKey customer encryption key; may be null for an unencrypted upload
 */
void uploadObject(String bucketName, String keyName, String filePath, SSECustomerKey sseKey)
        throws IOException {
    final PutObjectRequest request = new PutObjectRequest(bucketName, keyName, new File(filePath));
    // Attach SSE-C only when a key was actually supplied.
    if (sseKey != null) {
        request.withSSECustomerKey(sseKey);
    }
    s3Client.putObject(request);
}

From source file:io.stallion.services.S3StorageService.java

License:Open Source License

/**
 * Uploads a file to S3 with optional public-read ACL, content type, and
 * extra headers.
 *
 * BUG FIX: a stray {@code client.putObject(bucket, fileKey, file)} used to run
 * before the request was built, uploading the object TWICE — the first time
 * without ACL, metadata, or content type. That redundant call is removed; the
 * single request-based upload below carries all configuration.
 *
 * @param file        local file to upload
 * @param bucket      destination bucket
 * @param fileKey     destination object key
 * @param isPublic    when true, grants public-read via a canned ACL
 * @param contentType Content-Type to set; ignored when empty
 * @param headers     extra metadata headers; may be null
 */
public void uploadFile(File file, String bucket, String fileKey, boolean isPublic, String contentType,
        Map<String, String> headers) {
    PutObjectRequest req = new PutObjectRequest(bucket, fileKey, file);
    if (isPublic) {
        req.withCannedAcl(CannedAccessControlList.PublicRead);
    }
    ObjectMetadata meta = new ObjectMetadata();

    if (headers != null) {
        for (String key : headers.keySet()) {
            meta.setHeader(key, headers.get(key));
        }
    }
    if (!empty(contentType)) {
        meta.setContentType(contentType);
    }
    req.setMetadata(meta);
    client.putObject(req);

}

From source file:itcr.gitsnes.MainActivity.java

License:Open Source License

/** The method sendGame makes many functions:
 *      - Get all data from text boxes on layout add_game
 *      - Makes a random bucket key for a photo/file and put the object into the bucket
 *      - Wait for the success signal and send the JSON build from BackendHandler
 *        to app-engine (Google)/*from   w  w w . j ava  2s  .c  om*/
 */
/** The method sendGame makes many functions:
 *      - Get all data from text boxes on layout add_game
 *      - Makes a random bucket key for a photo/file and put the object into the bucket
 *      - Wait for the success signal and send the JSON build from BackendHandler
 *        to app-engine (Google)
 *
 * Fixes: Toast.makeText(...) now calls .show() (previously the toasts were
 * built but never displayed), and String comparisons use equals() instead of
 * reference == / !=.
 */
public void sendGame(View view) throws IOException {

    Toast.makeText(this, "Wait, we are uploading your game =) ", Toast.LENGTH_LONG).show();
    /* GET DATA FROM INTERFACE*/
    EditText name = (EditText) this.findViewById(R.id.txt_name);
    EditText description = (EditText) this.findViewById(R.id.txt_desc);
    EditText category = (EditText) this.findViewById(R.id.txt_cat);

    // Permits the network calls below on the UI thread.
    // NOTE(review): uploads should move to a background thread/AsyncTask instead.
    StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build();
    StrictMode.setThreadPolicy(policy);

    /* GENERATE RANDOM KEYS FOR PHOTO AND FILE*/
    this.file_key = "" + UUID.randomUUID().toString().replace("-", "");
    this.image_key = "" + UUID.randomUUID().toString().replace("-", "");

    /* SAVING GAME FILE/PHOTO ON AWS BUCKET*/
    // SECURITY NOTE(review): credentials are embedded in the app via KS constants;
    // prefer Cognito/temporary credentials for mobile clients.
    AmazonS3Client s3Client = new AmazonS3Client(
            new BasicAWSCredentials(KS.MY_ACCESS_KEY_ID, KS.MY_SECRET_KEY));

    PutObjectRequest putObjectRequestnew = new PutObjectRequest(KS.BUCKET_NAME, this.file_key, this.s3game);
    putObjectRequestnew.setCannedAcl(CannedAccessControlList.PublicRead);
    s3Client.putObject(putObjectRequestnew);

    PutObjectRequest putObjectImagenew = new PutObjectRequest(KS.BUCKET_IMG, this.image_key, this.s3image);
    putObjectImagenew.setCannedAcl(CannedAccessControlList.PublicRead);
    s3Client.putObject(putObjectImagenew);

    String actual_key = "none";
    String actual_image = "none";

    // equals(), not !=: reference comparison on Strings only works by interning accident.
    if (!"none".equals(this.file_key))
        actual_key = this.file_key;

    if (!"none".equals(this.image_key))
        actual_image = this.image_key;

    /* SEND JSON*/
    new BackendHandler().sendJSON(KS.getCurrent_user(), name.getText().toString(),
            category.getText().toString(), description.getText().toString(), actual_image, actual_key);
    Log.i(TAG, "Successful JSON send");
    Toast.makeText(this, "Congratulations your game has been sent", Toast.LENGTH_LONG).show();

}

From source file:jetbrains.buildServer.codepipeline.CodePipelineBuildListener.java

License:Apache License

/**
 * Finalizes a CodePipeline job when the build finishes.
 *
 * If the build failed or was interrupted, reports job failure to CodePipeline.
 * Otherwise uploads each expected output artifact to its designated S3
 * location (optionally SSE-KMS encrypted) and reports job success.
 * No-op when no job ID was recorded for this build.
 *
 * @param build       the finished agent build, source of config parameters and logging
 * @param buildStatus terminal status used to distinguish interruption from success
 */
private void processJobOutput(@NotNull final AgentRunningBuild build,
        @NotNull final BuildFinishedStatus buildStatus) {
    // No CodePipeline job was associated with this build; nothing to report.
    if (myJobID == null)
        return;

    AWSCommonParams.withAWSClients(build.getSharedConfigParameters(),
            new AWSCommonParams.WithAWSClients<Void, RuntimeException>() {
                @Nullable
                @Override
                public Void run(@NotNull AWSClients clients) throws RuntimeException {
                    // Declared outside try so failOnException can still reach the client.
                    AWSCodePipelineClient codePipelineClient = null;
                    try {
                        codePipelineClient = clients.createCodePipeLineClient();
                        if (build.isBuildFailingOnServer()) {
                            publishJobFailure(codePipelineClient, build, "Build failed");
                        } else if (BuildFinishedStatus.INTERRUPTED == buildStatus) {
                            publishJobFailure(codePipelineClient, build, "Build interrupted");
                        } else {
                            final Map<String, String> params = build.getSharedConfigParameters();
                            final JobData jobData = getJobData(codePipelineClient, params);

                            final List<Artifact> outputArtifacts = jobData.getOutputArtifacts();
                            if (outputArtifacts.isEmpty()) {
                                LOG.debug(msgForBuild(
                                        "No output artifacts expected for the job with ID: " + myJobID, build));
                            } else {
                                final File artifactOutputFolder = new File(
                                        params.get(ARTIFACT_OUTPUT_FOLDER_CONFIG_PARAM));

                                // Uploads every output artifact via a managed TransferManager;
                                // the job-specific artifact credentials (not the build's own
                                // AWS credentials) authorize these S3 writes.
                                S3Util.withTransferManager(
                                        getArtifactS3Client(jobData.getArtifactCredentials(), params),
                                        new S3Util.WithTransferManager<Upload>() {
                                            @NotNull
                                            @Override
                                            public Collection<Upload> run(
                                                    @NotNull final TransferManager manager) throws Throwable {
                                                return CollectionsUtil.convertCollection(outputArtifacts,
                                                        new Converter<Upload, Artifact>() {
                                                            @Override
                                                            public Upload createFrom(
                                                                    @NotNull Artifact artifact) {
                                                                // Locate the produced file for this artifact
                                                                // under the configured output folder.
                                                                final File buildArtifact = getBuildArtifact(
                                                                        artifact,
                                                                        jobData.getPipelineContext()
                                                                                .getPipelineName(),
                                                                        artifactOutputFolder, build);
                                                                final S3ArtifactLocation s3Location = artifact
                                                                        .getLocation().getS3Location();

                                                                build.getBuildLogger().message(
                                                                        "Uploading job output artifact "
                                                                                + s3Location.getObjectKey()
                                                                                + " from " + buildArtifact
                                                                                        .getAbsolutePath());
                                                                // SSE-KMS parameters come from the job's
                                                                // encryption key configuration.
                                                                return manager.upload(new PutObjectRequest(
                                                                        s3Location.getBucketName(),
                                                                        s3Location.getObjectKey(),
                                                                        buildArtifact)
                                                                                .withSSEAwsKeyManagementParams(
                                                                                        getSSEAwsKeyManagementParams(
                                                                                                jobData.getEncryptionKey())));
                                                            }
                                                        });
                                            }
                                        });

                                // Success is reported only after all uploads were started/completed
                                // by withTransferManager.
                                publishJobSuccess(codePipelineClient, build);
                            }
                        }
                    } catch (Throwable e) {
                        // Any failure (including upload errors) is reported back to CodePipeline.
                        failOnException(codePipelineClient, build, e);
                    }
                    return null;
                }
            });
}