Usage examples for the com.amazonaws.services.s3.model.PutObjectRequest constructor
public PutObjectRequest(String bucketName, String key, File file)
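All of the examples below build the request from a bucket name, an object key, and a local File, then pass it to AmazonS3.putObject. A minimal sketch of that pattern is shown here; the bucket name, key, and file path are placeholders, and the builder-based client setup (AmazonS3ClientBuilder with default credential/region resolution) is an assumption, not taken from the examples that follow.

import java.io.File;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.PutObjectResult;

public class PutObjectRequestSketch {
    public static void main(String[] args) {
        // Placeholder values; replace with your own bucket, key, and file path.
        String bucketName = "example-bucket";
        String key = "example/key.txt";
        File file = new File("/tmp/example.txt");

        // Client built with the default credential and region provider chains (SDK for Java 1.x).
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();

        // Upload the local file to s3://example-bucket/example/key.txt
        PutObjectRequest request = new PutObjectRequest(bucketName, key, file);
        PutObjectResult result = s3.putObject(request);
        System.out.println("ETag: " + result.getETag());
    }
}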
From source file:org.mule.module.s3.simpleapi.content.FileS3ObjectContent.java
License:Open Source License
public PutObjectRequest createPutObjectRequest() {
    PutObjectRequest request = new PutObjectRequest(null, null, file);
    request.setMetadata(new ObjectMetadata());
    return request;
}
From source file:org.nuxeo.aws.elastictranscoder.AWSS3Handler.java
License:Open Source License
public void sendFile(String inKey, File inFile) throws RuntimeException {
    try {
        amazonS3.putObject(new PutObjectRequest(bucket, inKey, inFile));
    } catch (AmazonServiceException ase) {
        String message = GenericAWSClient.buildDetailedMessageFromAWSException(ase);
        throw new RuntimeException(message);
    } catch (AmazonClientException ace) {
        String message = GenericAWSClient.buildDetailedMessageFromAWSException(ace);
        throw new RuntimeException(message);
    }
}
From source file:org.nuxeo.s3utils.S3HandlerImpl.java
License:Apache License
@Override
public boolean sendFile(String inKey, File inFile) throws NuxeoException {
    boolean ok = false;
    try {
        s3.putObject(new PutObjectRequest(currentBucket, inKey, inFile));
        ok = true;
    } catch (AmazonServiceException ase) {
        String message = S3Handler.buildDetailedMessageFromAWSException(ase);
        throw new NuxeoException(message);
    } catch (AmazonClientException ace) {
        String message = S3Handler.buildDetailedMessageFromAWSException(ace);
        throw new NuxeoException(message);
    }
    return ok;
}
From source file:org.onebusaway.admin.service.impl.S3FileServiceImpl.java
License:Apache License
@Override
public boolean createBundleDirectory(String filename) {
    try {
        /*
         * A file needs to be written for a directory to exist. Create a README file,
         * which could optionally contain meta-data such as creator, production
         * mode, etc.
         */
        File tmpFile = File.createTempFile("README", "txt");
        String contents = "Root of Bundle Build";
        FileWriter fw = new FileWriter(tmpFile);
        fw.append(contents);
        fw.close();

        PutObjectRequest request = new PutObjectRequest(_bucketName, filename + "/README.txt", tmpFile);
        PutObjectResult result = _s3.putObject(request);

        // now create tree structure
        request = new PutObjectRequest(_bucketName, filename + "/" + this.getGtfsPath() + "/README.txt", tmpFile);
        result = _s3.putObject(request);
        request = new PutObjectRequest(_bucketName, filename + "/" + this.getAuxPath() + "/README.txt", tmpFile);
        result = _s3.putObject(request);
        request = new PutObjectRequest(_bucketName, filename + "/" + this.getBuildPath() + "/README.txt", tmpFile);
        result = _s3.putObject(request);
        return result != null;
    } catch (Exception e) {
        _log.error(e.toString(), e);
        throw new RuntimeException(e);
    }
}
From source file:org.onebusaway.admin.service.impl.S3FileServiceImpl.java
License:Apache License
@Override
/**
 * push the contents of the directory to S3 at the given key location.
 */
public String put(String key, String file) {
    if (new File(file).isDirectory()) {
        File dir = new File(file);
        for (File contents : dir.listFiles()) {
            try {
                put(key, contents.getName(), contents.getCanonicalPath());
            } catch (IOException ioe) {
                _log.error(ioe.toString(), ioe);
            }
        }
        return null;
    }
    PutObjectRequest request = new PutObjectRequest(this._bucketName, key, new File(file));
    PutObjectResult result = _s3.putObject(request);
    return result.getVersionId();
}
From source file:org.onebusaway.admin.service.impl.S3FileServiceImpl.java
License:Apache License
public String put(String prefix, String key, String file) {
    if (new File(file).isDirectory()) {
        File dir = new File(file);
        for (File contents : dir.listFiles()) {
            try {
                put(prefix + "/" + key, contents.getName(), contents.getCanonicalPath());
            } catch (IOException ioe) {
                _log.error(ioe.toString(), ioe);
            }
        }
        return null;
    }
    String filename = prefix + "/" + key;
    _log.info("uploading " + file + " to " + filename);
    PutObjectRequest request = new PutObjectRequest(this._bucketName, filename, new File(file));
    PutObjectResult result = _s3.putObject(request);
    return result.getVersionId();
}
From source file:org.p365.S3Sample.java
License:Open Source License
public static void main(String[] args) throws IOException {
    /*
     * This credentials provider implementation loads your AWS credentials
     * from a properties file at the root of your classpath.
     *
     * Important: Be sure to fill in your AWS access credentials in the
     * AwsCredentials.properties file before you try to run this sample.
     * http://aws.amazon.com/security-credentials
     */
    AmazonS3 s3 = new AmazonS3Client(new ClasspathPropertiesFileCredentialsProvider());
    Region usWest2 = Region.getRegion(Regions.US_WEST_2);
    s3.setRegion(usWest2);

    String bucketName = "mynewbuket";
    String key = "Myobj/sd.jpg";

    System.out.println("===========================================");
    System.out.println("Getting Started with Amazon S3");
    System.out.println("===========================================\n");

    try {
        /*
         * Create a new S3 bucket - Amazon S3 bucket names are globally unique,
         * so once a bucket name has been taken by any user, you can't create
         * another bucket with that same name.
         *
         * You can optionally specify a location for your bucket if you want to
         * keep your data closer to your applications or users.
         */
        System.out.println("Creating bucket " + bucketName + "\n");
        if (!s3.doesBucketExist(bucketName)) {
            s3.createBucket(bucketName);
        }

        /*
         * List the buckets in your account.
         */
        System.out.println("Listing buckets");
        for (Bucket bucket : s3.listBuckets()) {
            System.out.println(" - " + bucket.getName());
        }
        System.out.println();

        /*
         * Upload an object to your bucket - You can easily upload a file to
         * S3, or upload directly an InputStream if you know the length of
         * the data in the stream. You can also specify your own metadata
         * when uploading to S3, which allows you to set a variety of options
         * like content-type and content-encoding, plus additional metadata
         * specific to your applications.
         */
        System.out.println("Uploading a new object to S3 from a file\n");
        String pathname = "D:\\Program Files\\apache-tomcat-7.0.42\\webapps\\WorkerForP365\\src\\AAA_1465.jpg";
        File file = new File(pathname);
        s3.putObject(
                new PutObjectRequest(bucketName, key, file).withCannedAcl(CannedAccessControlList.PublicRead));

        /*
         * Download an object - When you download an object, you get all of
         * the object's metadata and a stream from which to read the contents.
         * It's important to read the contents of the stream as quickly as
         * possible since the data is streamed directly from Amazon S3 and your
         * network connection will remain open until you read all the data or
         * close the input stream.
         *
         * GetObjectRequest also supports several other options, including
         * conditional downloading of objects based on modification times,
         * ETags, and selectively downloading a range of an object.
         */
        System.out.println("Downloading an object");
        S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
        System.out.println("Content-Type: " + object.getObjectMetadata().getContentType());
        displayTextInputStream(object.getObjectContent());

        /*
         * List objects in your bucket by prefix - There are many options for
         * listing the objects in your bucket. Keep in mind that buckets with
         * many objects might truncate their results when listing their objects,
         * so be sure to check if the returned object listing is truncated, and
         * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve
         * additional results.
         */
        System.out.println("Listing objects");
        ObjectListing objectListing = s3
                .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix("My"));
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            System.out.println(
                    " - " + objectSummary.getKey() + " " + "(size = " + objectSummary.getSize() + ")");
        }
        System.out.println();

        /*
         * Delete an object - Unless versioning has been turned on for your bucket,
         * there is no way to undelete an object, so use caution when deleting objects.
         */
        //System.out.println("Deleting an object\n");
        //s3.deleteObject(bucketName, key);

        /*
         * Delete a bucket - A bucket must be completely empty before it can be
         * deleted, so remember to delete any objects from your buckets before
         * you try to delete them.
         */
        //System.out.println("Deleting bucket " + bucketName + "\n");
        //s3.deleteBucket(bucketName);
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message: " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code: " + ase.getErrorCode());
        System.out.println("Error Type: " + ase.getErrorType());
        System.out.println("Request ID: " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}
From source file:org.pathirage.ceph.bench.VolumeUploader.java
License:Apache License
private void uploadVolume(String volumeId, File volume) {
    getS3Connection().putObject(new PutObjectRequest(bucket, volumeId, volume));
}
From source file:org.pathirage.ceph.bench.WriteThroughputBench.java
License:Apache License
private void writeVolumes(List<String> volumes) {
    for (String volume : volumes) {
        pool.submit(() -> {
            // When saved to a file, we assume the node running the benchmark has better
            // write throughput than the read throughput of Ceph.
            getS3Connection().putObject(new PutObjectRequest(VOL_BUCKET, volume,
                    Paths.get(volumesDirectory, String.format("%s.zip", volume)).toFile()));
            doneSignal.countDown();
        });
    }
}
From source file:org.pentaho.amazon.client.impl.S3ClientImpl.java
License:Apache License
@Override
public void putObjectInBucket(String stagingBucketName, String key, File tmpFile) {
    s3Client.putObject(new PutObjectRequest(stagingBucketName, key, tmpFile));
}