List of usage examples for com.amazonaws AmazonServiceException getErrorCode
public String getErrorCode()
From source file:org.apache.usergrid.apm.service.ApplicationServiceImpl.java
License:Apache License
/**
 * Deletes the SQS queue associated with the given application.
 *
 * @param appName name of the application whose queue should be deleted
 * @return {@code true} if the queue was deleted by this call; {@code false} if the
 *         queue did not exist or the delete failed (failure details are logged)
 */
boolean deleteSQSQueue(String appName) {
    log.info("Deleting Queue for App : " + appName);
    DeleteQueueRequest deleteQueueRequest = new DeleteQueueRequest();
    deleteQueueRequest.setQueueUrl(AWSUtil.formFullQueueUrl(appName));
    try {
        sqsClient.deleteQueue(deleteQueueRequest);
        return true;
    } catch (AmazonServiceException ase) {
        // Constant-first equals() is null-safe in case the SDK returns no error code.
        if ("AWS.SimpleQueueService.NonExistentQueue".equals(ase.getErrorCode())) {
            // Queue already gone: not an error worth alerting on.
            log.info("Queue for app : " + appName + " was probably already deleted. " + ase.getMessage());
        } else {
            log.error("Failed to delete queue for app : " + appName, ase);
        }
    } catch (AmazonClientException ace) {
        log.error("Client-side error while deleting queue for app : " + appName, ace);
    }
    return false;
}
From source file:org.apereo.portal.portlets.dynamicskin.storage.s3.AwsS3DynamicSkinService.java
License:Apache License
/**
 * Logs the diagnostic fields of an {@link AmazonServiceException} returned by Amazon S3.
 *
 * @param exception the service exception received from S3; must not be {@code null}
 * @param request the request that triggered the failure, used to identify the failing
 *        operation in the log (previously this parameter was accepted but never used)
 */
private void logAmazonServiceException(final AmazonServiceException exception, final AmazonWebServiceRequest request) {
    log.info("Caught an AmazonServiceException, which means your request made it "
            + "to Amazon S3, but was rejected with an error response for some reason.");
    // Attribute the failure to the request type so log readers know which call failed.
    log.info("Failed Request Type: {}", request == null ? "unknown" : request.getClass().getSimpleName());
    log.info("Error Message: {}", exception.getMessage());
    log.info("HTTP Status Code: {}", exception.getStatusCode());
    log.info("AWS Error Code: {}", exception.getErrorCode());
    log.info("Error Type: {}", exception.getErrorType());
    log.info("Request ID: {}", exception.getRequestId());
}
From source file:org.applicationMigrator.serverAgent.ServerAgentFileTransferClient.java
License:Apache License
private void uploadFile(AWSCredentials awsCredentials, String sourcePathString, String destinationPathString, boolean forceUpload) throws FileNotFoundException { // TODO Think about one file being used by many apps (e.g HP1.pdf read // through Adobe reader and OpenOffice) AmazonS3 s3client = new AmazonS3Client(awsCredentials); boolean fileIsPresentOnServer = checkIfFileIsPresentOnServer(s3client, BUCKET_NAME, destinationPathString); if (fileIsPresentOnServer && !forceUpload) return;/*from www .j ava2 s .c o m*/ try { File file = new File(sourcePathString); if (!file.exists()) throw new FileNotFoundException(); s3client.putObject(new PutObjectRequest(BUCKET_NAME, destinationPathString, file)); } catch (AmazonServiceException ase) { System.out.println("Caught an AmazonServiceException, which " + "means your request made it " + "to Amazon S3, but was rejected with an error response" + " for some reason."); System.out.println("Error Message: " + ase.getMessage()); System.out.println("HTTP Status Code: " + ase.getStatusCode()); System.out.println("AWS Error Code: " + ase.getErrorCode()); System.out.println("Error Type: " + ase.getErrorType()); System.out.println("Request ID: " + ase.getRequestId()); throw ase; } catch (AmazonClientException ace) { System.out.println("Caught an AmazonClientException, which " + "means the client encountered " + "an internal error while trying to " + "communicate with S3, " + "such as not being able to access the network."); System.out.println("Error Message: " + ace.getMessage()); throw ace; } // TODO:verify completion of upload operation }
From source file:org.boriken.s3fileuploader.S3SampleRefactored.java
License:Open Source License
public static void main(String[] args) throws IOException { /*/*from w w w . java 2 s . c o m*/ * Important: Be sure to fill in your AWS access credentials in the * AwsCredentials.properties file before you try to run this * sample. * http://aws.amazon.com/security-credentials */ AmazonS3 s3 = new AmazonS3Client(new PropertiesCredentials( S3SampleRefactored.class.getResourceAsStream("../conf/AwsCredentials.properties"))); // String bucketName = "chamakits-my-first-s3-bucket-" + UUID.randomUUID(); String bucketName = "chamakits-HelloS3"; String key = "somekey"; System.out.println("==========================================="); System.out.println("Getting Started with Amazon S3"); System.out.println("===========================================\n"); try { // createBucket(s3,bucketName); // listBuckets(s3); // createFile(s3,bucketName,key); // downloadFile(s3,bucketName,key); // listFiles(s3, bucketName,""); // deleteFile(s3, bucketName, key); // deleteBucket(s3, bucketName); listFiles(s3, bucketName, ""); } catch (AmazonServiceException ase) { System.out.println("Caught an AmazonServiceException, which means your request made it " + "to Amazon S3, but was rejected with an error response for some reason."); System.out.println("Error Message: " + ase.getMessage()); System.out.println("HTTP Status Code: " + ase.getStatusCode()); System.out.println("AWS Error Code: " + ase.getErrorCode()); System.out.println("Error Type: " + ase.getErrorType()); System.out.println("Request ID: " + ase.getRequestId()); } catch (AmazonClientException ace) { System.out.println("Caught an AmazonClientException, which means the client encountered " + "a serious internal problem while trying to communicate with S3, " + "such as not being able to access the network."); System.out.println("Error Message: " + ace.getMessage()); } }
From source file:org.broadleafcommerce.vendor.amazon.s3.S3FileServiceProvider.java
License:Apache License
/**
 * Writes the given resources to S3. When S3 rejects the call with "NoSuchBucket",
 * the default bucket is created and the write is retried exactly once.
 *
 * @param workArea the work area containing the files
 * @param files the files to add or update in S3
 * @param removeFilesFromWorkArea whether to remove the files from the work area afterwards
 * @return the resource paths that were written
 */
@Override
public List<String> addOrUpdateResourcesForPaths(FileWorkArea workArea, List<File> files,
        boolean removeFilesFromWorkArea) {
    final S3Configuration config = s3ConfigurationService.lookupS3Configuration();
    final AmazonS3Client client = getAmazonS3Client(config);
    try {
        return addOrUpdateResourcesInternal(config, client, workArea, files, removeFilesFromWorkArea);
    } catch (AmazonServiceException ase) {
        if (!"NoSuchBucket".equals(ase.getErrorCode())) {
            throw new RuntimeException(ase);
        }
        // The target bucket does not exist yet: create it and retry once.
        client.createBucket(config.getDefaultBucketName());
        return addOrUpdateResourcesInternal(config, client, workArea, files, removeFilesFromWorkArea);
    }
}
From source file:org.broadleafcommerce.vendor.amazon.s3.S3FileServiceProvider.java
License:Apache License
public void addOrUpdateResource(InputStream inputStream, String fileName, long fileSizeInBytes) { S3Configuration s3config = s3ConfigurationService.lookupS3Configuration(); AmazonS3Client s3 = getAmazonS3Client(s3config); try {//from w w w .ja v a 2s.c om addOrUpdateResourcesInternalStreamVersion(s3config, s3, inputStream, fileName, fileSizeInBytes); } catch (AmazonServiceException ase) { if ("NoSuchBucket".equals(ase.getErrorCode())) { s3.createBucket(s3config.getDefaultBucketName()); addOrUpdateResourcesInternalStreamVersion(s3config, s3, inputStream, fileName, fileSizeInBytes); } else { throw new RuntimeException(ase); } } }
From source file:org.chodavarapu.jgitaws.repositories.PackRepository.java
License:Eclipse Distribution License
/**
 * Opens an output stream for writing a pack file; the bytes written to the returned
 * stream are piped to an asynchronous S3 upload of the corresponding object.
 * If the packs bucket does not exist yet (S3 reports "InvalidBucketName" or
 * "InvalidBucketState"), it is created with versioning OFF and the upload is retried.
 *
 * NOTE(review): the retry re-uses the same PipedInputStream that the failed upload
 * may have already partially consumed — verify the retry can actually see the full
 * content. Also, the Async.fromAction result is discarded, so upload failures other
 * than the handled bucket errors appear to have no visible completion/error channel
 * from this method — confirm how callers observe them.
 *
 * @param repositoryName repository the pack belongs to
 * @param packName name of the pack file
 * @param length content length in bytes (cast to int below — packs over 2 GiB would truncate)
 * @return a DfsOutputStream whose writes feed the in-flight S3 upload
 * @throws IOException declared for stream setup failures
 */
public DfsOutputStream savePack(String repositoryName, String packName, long length) throws IOException {
    PipedInputStream pipedInputStream = new PipedInputStream(configuration.getStreamingBlockSize());
    ObjectMetadata metaData = new ObjectMetadata();
    metaData.setContentLength(length);
    String objectName = objectName(repositoryName, packName);
    // Upload runs on the I/O scheduler while the caller writes into the pipe.
    Async.fromAction(() -> {
        logger.debug("Attempting to save pack {} to S3 bucket", objectName);
        try {
            configuration.getS3Client().putObject(configuration.getPacksBucketName(), objectName,
                    pipedInputStream, metaData);
        } catch (AmazonServiceException e) {
            if ("InvalidBucketName".equals(e.getErrorCode()) || "InvalidBucketState".equals(e.getErrorCode())) {
                logger.debug("S3 packs bucket does not exist yet, creating it");
                configuration.getS3Client()
                        .createBucket(new CreateBucketRequest(configuration.getPacksBucketName()));
                configuration.getS3Client().setBucketVersioningConfiguration(
                        new SetBucketVersioningConfigurationRequest(configuration.getPacksBucketName(),
                                new BucketVersioningConfiguration(BucketVersioningConfiguration.OFF)));
                logger.debug("Created bucket, saving pack {}", objectName);
                configuration.getS3Client().putObject(configuration.getPacksBucketName(), objectName,
                        pipedInputStream, metaData);
            } else {
                throw e;
            }
        }
    }, null, Schedulers.io());
    // The (int) cast truncates lengths beyond Integer.MAX_VALUE — see NOTE above.
    return new PipedDfsOutputStream(pipedInputStream, objectName, (int) length,
            configuration.getStreamingBlockSize());
}
From source file:org.crypto.sse.IEX2LevAMAZON.java
License:Open Source License
/**
 * Driver for building the IEX-2Lev encrypted index on AWS: runs two Hadoop jobs,
 * mirrors their inverted-index outputs through S3, derives keys from a user
 * password, then runs a third map-only job that builds the local multi-maps.
 *
 * NOTE(review): the AWS credentials below are committed placeholders ("XXXX...");
 * real secrets must never be hard-coded here. The BufferedReaders opened on the
 * S3 object streams and on System.in are never closed, and line.getBytes() uses
 * the platform default charset — confirm both are acceptable for this tool.
 *
 * @param args [0] job input path, [1] job1 output, [2] job2 output / job3 input,
 *             [3] job3 output, [4] and [5] S3 keys of the two inverted indexes
 *             — TODO confirm against the submitting script
 * @throws Exception propagated from Hadoop, S3, and key generation
 */
@SuppressWarnings("null")
public static void main(String[] args) throws Exception {

    // First Job
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "IEX-2Lev");
    job.setJarByClass(IEX2LevAMAZON.class);
    job.setMapperClass(MLK1.class);
    job.setReducerClass(RLK1.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Text.class);
    job.setOutputKeyClass(Text.class);
    job.setNumReduceTasks(1);
    job.setOutputValueClass(ArrayListWritable.class);
    job.setInputFormatClass(FileNameKeyInputFormat.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    // Second Job
    Configuration conf2 = new Configuration();
    Job job2 = Job.getInstance(conf2, "IEX-2Lev");
    job2.setJarByClass(IEX2LevAMAZON.class);
    job2.setMapperClass(MLK2.class);
    job2.setReducerClass(RLK2.class);
    job2.setNumReduceTasks(1);
    job2.setMapOutputKeyClass(Text.class);
    job2.setMapOutputValueClass(Text.class);
    job2.setOutputKeyClass(Text.class);
    job2.setOutputValueClass(ArrayListWritable.class);
    job2.setInputFormatClass(FileNameKeyInputFormat.class);
    FileInputFormat.addInputPath(job2, new Path(args[0]));
    FileOutputFormat.setOutputPath(job2, new Path(args[2]));

    // Both jobs are configured first, then run sequentially (blocking).
    job.waitForCompletion(true);
    job2.waitForCompletion(true);

    // Here add your Amazon Credentials
    // NOTE(review): placeholder credentials — replace via a credentials provider, do not commit secrets.
    AWSCredentials credentials = new BasicAWSCredentials("XXXXXXXXXXXXXXXX", "XXXXXXXXXXXXXXXX");
    // create a client connection based on credentials
    AmazonS3 s3client = new AmazonS3Client(credentials);

    // create bucket - name must be unique for all S3 users
    String bucketName = "iexmaptest";

    S3Object s3object = s3client.getObject(new GetObjectRequest(bucketName, args[4]));
    System.out.println(s3object.getObjectMetadata().getContentType());
    System.out.println(s3object.getObjectMetadata().getContentLength());

    List<String> lines = new ArrayList<String>();
    String folderName = "2";

    // Read the first inverted index line by line, echoing and re-uploading each
    // line as its own S3 object under folderName/<counter>.
    BufferedReader reader = new BufferedReader(new InputStreamReader(s3object.getObjectContent()));
    String line;
    int counter = 0;
    while ((line = reader.readLine()) != null) {
        // can copy the content locally as well
        // using a buffered writer
        lines.add(line);
        System.out.println(line);
        // upload file to folder
        String fileName = folderName + "/" + Integer.toString(counter);
        ByteArrayInputStream input = new ByteArrayInputStream(line.getBytes());
        s3client.putObject(bucketName, fileName, input, new ObjectMetadata());
        counter++;
    }

    // Each line is "<key> <value> <value> ..."; build key -> values multimap.
    Multimap<String, String> lookup = ArrayListMultimap.create();
    for (int i = 0; i < lines.size(); i++) {
        String[] tokens = lines.get(i).split("\\s+");
        for (int j = 1; j < tokens.length; j++) {
            lookup.put(tokens[0], tokens[j]);
        }
    }

    // Loading inverted index that associates files identifiers to keywords
    lines = new ArrayList<String>();
    s3object = s3client.getObject(new GetObjectRequest(bucketName, args[5]));
    System.out.println(s3object.getObjectMetadata().getContentType());
    System.out.println(s3object.getObjectMetadata().getContentLength());

    // Loading inverted index that associates keywords to identifiers
    reader = new BufferedReader(new InputStreamReader(s3object.getObjectContent()));
    while ((line = reader.readLine()) != null) {
        lines.add(line);
    }

    Multimap<String, String> lookup2 = ArrayListMultimap.create();
    for (int i = 0; i < lines.size(); i++) {
        String[] tokens = lines.get(i).split("\\s+");
        for (int j = 1; j < tokens.length; j++) {
            lookup2.put(tokens[0], tokens[j]);
        }
    }

    // Delete File -- best-effort; failures are printed, not propagated.
    try {
        s3client.deleteObject(new DeleteObjectRequest(bucketName, args[4]));
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException.");
        System.out.println("Error Message: " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code: " + ase.getErrorCode());
        System.out.println("Error Type: " + ase.getErrorType());
        System.out.println("Request ID: " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException.");
        System.out.println("Error Message: " + ace.getMessage());
    }

    /*
     * Start of IEX-2Lev construction
     */

    // Generation of keys for IEX-2Lev
    BufferedReader keyRead = new BufferedReader(new InputStreamReader(System.in));
    System.out.println("Enter your password :");
    String pass = keyRead.readLine();

    // You can change the size of the key; Here we set it to 128
    List<byte[]> listSK = IEX2Lev.keyGen(128, pass, "salt/salt", 100);

    // Generation of Local Multi-maps with Mapper job only without reducer
    Configuration conf3 = new Configuration();

    // Serialize the lookups and keys into the job configuration as Base64 strings.
    String testSerialization1 = new String(Base64.encodeBase64(Serializer.serialize(lookup)));
    String testSerialization2 = new String(Base64.encodeBase64(Serializer.serialize(lookup2)));
    String testSerialization3 = new String(Base64.encodeBase64(Serializer.serialize(listSK)));

    //String testSerialization2 = gson.toJson(lookup2);
    conf3.set("lookup", testSerialization1);
    conf3.set("lookup2", testSerialization2);
    conf3.set("setKeys", testSerialization3);

    Job job3 = Job.getInstance(conf3, "Local MM");
    job3.setJarByClass(IEX2LevAMAZON.class);
    job3.setMapperClass(LocalMM.class);
    job3.setNumReduceTasks(0);

    FileInputFormat.addInputPath(job3, new Path(args[2]));
    FileOutputFormat.setOutputPath(job3, new Path(args[3]));

    job3.waitForCompletion(true);
}
From source file:org.cto.VVS3Box.S3Sample.java
License:Open Source License
/**
 * Walks through the basic Amazon S3 lifecycle: create a bucket, list buckets,
 * upload an object, download it, list objects by prefix, then delete the object
 * and the bucket. All progress and error details are printed to stdout.
 *
 * Relies on sibling helpers createSampleFile() and displayTextInputStream()
 * defined elsewhere in this class.
 *
 * @param args unused
 * @throws IOException if creating the sample file or reading the download fails
 */
public static void main(String[] args) throws IOException {
    /*
     * This credentials provider implementation loads your AWS credentials
     * from a properties file at the root of your classpath.
     *
     * Important: Be sure to fill in your AWS access credentials in the
     * AwsCredentials.properties file before you try to run this
     * sample.
     * http://aws.amazon.com/security-credentials
     */
    AmazonS3 s3 = new AmazonS3Client(new ClasspathPropertiesFileCredentialsProvider());
    Region usWest2 = Region.getRegion(Regions.US_WEST_2);
    s3.setRegion(usWest2);

    // Random suffix keeps the bucket name globally unique across runs.
    String bucketName = "lior.test-" + UUID.randomUUID();
    String key = "MyObjectKey";

    System.out.println("===========================================");
    System.out.println("Getting Started with Amazon S3");
    System.out.println("===========================================\n");

    try {
        /*
         * Create a new S3 bucket - Amazon S3 bucket names are globally unique,
         * so once a bucket name has been taken by any user, you can't create
         * another bucket with that same name.
         *
         * You can optionally specify a location for your bucket if you want to
         * keep your data closer to your applications or users.
         */
        System.out.println("Creating bucket " + bucketName + "\n");
        s3.createBucket(bucketName);

        /*
         * List the buckets in your account
         */
        System.out.println("Listing buckets");
        for (Bucket bucket : s3.listBuckets()) {
            System.out.println(" - " + bucket.getName());
        }
        System.out.println();

        /*
         * Upload an object to your bucket - You can easily upload a file to
         * S3, or upload directly an InputStream if you know the length of
         * the data in the stream. You can also specify your own metadata
         * when uploading to S3, which allows you set a variety of options
         * like content-type and content-encoding, plus additional metadata
         * specific to your applications.
         */
        System.out.println("Uploading a new object to S3 from a file\n");
        s3.putObject(new PutObjectRequest(bucketName, key, createSampleFile()));

        /*
         * Download an object - When you download an object, you get all of
         * the object's metadata and a stream from which to read the contents.
         * It's important to read the contents of the stream as quickly as
         * possibly since the data is streamed directly from Amazon S3 and your
         * network connection will remain open until you read all the data or
         * close the input stream.
         *
         * GetObjectRequest also supports several other options, including
         * conditional downloading of objects based on modification times,
         * ETags, and selectively downloading a range of an object.
         */
        System.out.println("Downloading an object");
        S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
        System.out.println("Content-Type: " + object.getObjectMetadata().getContentType());
        displayTextInputStream(object.getObjectContent());

        /*
         * List objects in your bucket by prefix - There are many options for
         * listing the objects in your bucket. Keep in mind that buckets with
         * many objects might truncate their results when listing their objects,
         * so be sure to check if the returned object listing is truncated, and
         * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve
         * additional results.
         */
        System.out.println("Listing objects");
        ObjectListing objectListing = s3
                .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix("My"));
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            System.out.println(
                    " - " + objectSummary.getKey() + "  " + "(size = " + objectSummary.getSize() + ")");
        }
        System.out.println();

        /*
         * Delete an object - Unless versioning has been turned on for your bucket,
         * there is no way to undelete an object, so use caution when deleting objects.
         */
        System.out.println("Deleting an object\n");
        s3.deleteObject(bucketName, key);

        /*
         * Delete a bucket - A bucket must be completely empty before it can be
         * deleted, so remember to delete any objects from your buckets before
         * you try to delete them.
         */
        System.out.println("Deleting bucket " + bucketName + "\n");
        s3.deleteBucket(bucketName);
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message: " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code: " + ase.getErrorCode());
        System.out.println("Error Type: " + ase.getErrorType());
        System.out.println("Request ID: " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}
From source file:org.finra.dm.dao.helper.AwsExceptionRetryAdvice.java
License:Apache License
/**
 * Reports whether the given AWS service exception carries one of the
 * configured retryable error codes.
 *
 * @param ase the AWS service exception to inspect
 * @return {@code true} if the exception's error code is in the configured list
 */
private boolean isRetryableException(AmazonServiceException ase) {
    return dmStringHelper.splitStringWithDefaultDelimiter(getExceptionErrorCodes())
            .contains(ase.getErrorCode());
}