List of usage examples for com.amazonaws.regions.Region.getRegion
public static Region getRegion(Regions region)
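A minimal standalone sketch of the call, for orientation before the project examples below. It only assumes the SDK classes already used in those examples (Region, Regions); the class name and region names are illustrative.

import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;

public class RegionGetRegionExample {

    public static void main(String[] args) {
        // Look up the Region metadata object for a Regions enum constant.
        Region usEast1 = Region.getRegion(Regions.US_EAST_1);
        System.out.println(usEast1.getName()); // prints "us-east-1"

        // Regions.fromName(...) maps a region string (for example, the region
        // reported in a CloudTrail event) to the enum before the lookup.
        Region fromString = Region.getRegion(Regions.fromName("eu-west-1"));
        System.out.println(fromString.getName()); // prints "eu-west-1"
    }
}

The project examples below typically pass the resulting Region either to a client provider or to a client's setRegion(...) call.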
From source file:org.zalando.stups.fullstop.events.CloudtrailEventSupport.java
License:Apache License
public static Region getRegion(final String regionString) {
    checkState(!isNullOrEmpty(regionString), REGION_STRING_SHOULD_NEVER_BE_NULL_OR_EMPTY);
    return Region.getRegion(Regions.fromName(regionString));
}
From source file:org.zalando.stups.fullstop.jobs.IdentityManagementDataSource.java
License:Apache License
List<Tuple<String, ListAccessKeysResult>> getListAccessKeysResultPerAccountWithTuple() {
    List<Tuple<String, ListAccessKeysResult>> result = newArrayList();
    for (String accountId : getAccountIds()) {
        AmazonIdentityManagementClient identityClient = clientProvider.getClient(
                AmazonIdentityManagementClient.class, accountId, Region.getRegion(Regions.EU_WEST_1));
        if (identityClient != null) {
            result.add(new Tuple<>(accountId, identityClient.listAccessKeys()));
        } else {
            logger.error("Could not create 'AmazonIdentityManagementClient' for accountId : {}", accountId);
        }
    }
    return result;
}
From source file:org.zalando.stups.fullstop.jobs.IdentityManagementDataSource.java
License:Apache License
List<Tuple<String, ListUsersResult>> getListUsersResultPerAccountWithTuple() {
    List<Tuple<String, ListUsersResult>> result = newArrayList();
    for (String accountId : getAccountIds()) {
        AmazonIdentityManagementClient identityClient = clientProvider.getClient(
                AmazonIdentityManagementClient.class, accountId, Region.getRegion(Regions.EU_WEST_1));
        if (identityClient != null) {
            result.add(new Tuple<>(accountId, identityClient.listUsers()));
        } else {
            logger.error("Could not create 'AmazonIdentityManagementClient' for accountId : {}", accountId);
        }
    }
    return result;
}
From source file:org.zalando.stups.fullstop.plugin.AmiPlugin.java
License:Apache License
@Override
public void processEvent(final CloudTrailEvent event) {
    List<String> amis = getAmis(event);
    final List<String> whitelistedAmis = Lists.newArrayList();

    AmazonEC2Client ec2Client = cachingClientProvider.getClient(AmazonEC2Client.class, whitelistedAmiAccount,
            Region.getRegion(Regions.fromName(event.getEventData().getAwsRegion())));

    DescribeImagesRequest describeImagesRequest = new DescribeImagesRequest().withOwners(whitelistedAmiAccount);
    DescribeImagesResult describeImagesResult = ec2Client.describeImages(describeImagesRequest);
    List<Image> images = describeImagesResult.getImages();
    whitelistedAmis.addAll(images.stream().filter(image -> image.getName().startsWith(amiNameStartWith))
            .map(Image::getImageId).collect(Collectors.toList()));

    List<String> invalidAmis = Lists.newArrayList();
    for (String ami : amis) {
        boolean valid = false;
        for (String whitelistedAmi : whitelistedAmis) {
            if (ami.equals(whitelistedAmi)) {
                valid = true;
            }
        }
        if (!valid) {
            invalidAmis.add(ami);
        }
    }

    if (!CollectionUtils.isEmpty(invalidAmis)) {
        violationStore.save(new ViolationBuilder(format("Instances with ids: %s was started with wrong images: %s",
                getInstanceIds(event), invalidAmis)).withEvent(event).build());
    }
}
From source file:org.zalando.stups.fullstop.plugin.example.ExamplePlugin.java
License:Apache License
@Override
// @HystrixCommand(fallback = my coole exception)
// command for account id and client type -> generate new credentials
public void processEvent(final CloudTrailEvent event) {
    String parameters = event.getEventData().getRequestParameters();
    String instanceId = getFromParameters(parameters);

    AmazonEC2Client client = getClientForAccount(event.getEventData().getUserIdentity().getAccountId(),
            Region.getRegion(Regions.fromName(event.getEventData().getAwsRegion())));

    DescribeInstancesRequest request = new DescribeInstancesRequest();
    request.setInstanceIds(Collections.singleton(instanceId));

    // try
    DescribeInstancesResult result = client.describeInstances(request);
    // catch credentials are old
    // throw new my coole exception ( account id, CLIENTTYPE.EC2, exception) -> this will trigger hystrix

    LOG.info("SAVING RESULT INTO MAGIC DB", result);
}
From source file:org.zalando.stups.fullstop.plugin.RegistryPlugin.java
License:Apache License
private Map getUserData(final CloudTrailEvent event, final String instanceId) {
    AmazonEC2Client ec2Client = cachingClientProvider.getClient(AmazonEC2Client.class,
            event.getEventData().getUserIdentity().getAccountId(),
            Region.getRegion(Regions.fromName(event.getEventData().getAwsRegion())));

    DescribeInstanceAttributeRequest describeInstanceAttributeRequest = new DescribeInstanceAttributeRequest();
    describeInstanceAttributeRequest.setInstanceId(instanceId);
    describeInstanceAttributeRequest.setAttribute(USER_DATA);

    DescribeInstanceAttributeResult describeInstanceAttributeResult;
    try {
        describeInstanceAttributeResult = ec2Client.describeInstanceAttribute(describeInstanceAttributeRequest);
    } catch (AmazonServiceException e) {
        LOG.error(e.getMessage());
        violationStore.save(new ViolationBuilder(format("InstanceId: %s doesn't have any userData.", instanceId))
                .withEvent(event).build());
        return null;
    }

    String userData = describeInstanceAttributeResult.getInstanceAttribute().getUserData();
    if (userData == null) {
        violationStore.save(new ViolationBuilder(format("InstanceId: %s doesn't have any userData.", instanceId))
                .withEvent(event).build());
        return null;
    }

    byte[] bytesUserData = Base64.decode(userData);
    String decodedUserData = new String(bytesUserData);

    Yaml yaml = new Yaml();
    return (Map) yaml.load(decodedUserData);
}
From source file:org.zalando.stups.fullstop.plugin.SubnetPlugin.java
License:Apache License
@Override
public void processEvent(final CloudTrailEvent event) {
    List<String> subnetIds = newArrayList();
    List<Filter> SubnetIdFilters = newArrayList();
    DescribeInstancesRequest describeInstancesRequest = new DescribeInstancesRequest();
    List<String> instanceIds = getInstanceIds(event);

    AmazonEC2Client amazonEC2Client = cachingClientProvider.getClient(AmazonEC2Client.class,
            event.getEventData().getAccountId(),
            Region.getRegion(Regions.fromName(event.getEventData().getAwsRegion())));

    DescribeInstancesResult describeInstancesResult = null;
    try {
        describeInstancesResult = amazonEC2Client
                .describeInstances(describeInstancesRequest.withInstanceIds(instanceIds));
    } catch (AmazonServiceException e) {
        violationStore.save(new ViolationBuilder(e.getMessage()).withEvent(event).build());
        return;
    }

    List<Reservation> reservations = describeInstancesResult.getReservations();
    for (Reservation reservation : reservations) {
        List<Instance> instances = reservation.getInstances();
        subnetIds.addAll(instances.stream().map(Instance::getSubnetId).collect(Collectors.toList()));
    }

    // filter by subnetId
    SubnetIdFilters.add(new Filter().withName("association.subnet-id").withValues(subnetIds));

    DescribeRouteTablesRequest describeRouteTablesRequest = new DescribeRouteTablesRequest()
            .withFilters(SubnetIdFilters);
    DescribeRouteTablesResult describeRouteTablesResult = amazonEC2Client
            .describeRouteTables(describeRouteTablesRequest);
    List<RouteTable> routeTables = describeRouteTablesResult.getRouteTables();

    if (routeTables == null || routeTables.size() == 0) {
        violationStore.save(new ViolationBuilder(
                format("Instances %s have no routing information associated", instanceIds.toString()))
                .withEvent(event).build());
        return;
    }

    for (RouteTable routeTable : routeTables) {
        List<Route> routes = routeTable.getRoutes();
        routes.stream()
                .filter(route -> route.getState().equals("active") && route.getNetworkInterfaceId() != null
                        && !route.getNetworkInterfaceId().startsWith("eni"))
                .forEach(route -> violationStore.save(
                        new ViolationBuilder(format("ROUTES: instance %s is running in a public subnet %s",
                                route.getInstanceId(), route.getNetworkInterfaceId())).withEvent(event)
                                .build()));
    }
}
From source file:oulib.aws.Main.java
public static void main(String[] args) {
    try {
        AWSCredentials credentials = null;
        AmazonS3 s3Client = null;

        // args = new String[4];
        // args[0] = "ul-bagit";
        // args[1] = "ul-ir-workspace";
        // args[2] = "Borelli_1680-1681";
        // args[3] = "6";

        try {
            credentials = new ProfileCredentialsProvider("default").getCredentials();
        } catch (Exception e) {
            String access_key_id = null;
            String secret_key_id = null;
            String credentialInfo = AwsUtil.getAwsCredentials();

            ObjectMapper mapper = new ObjectMapper();
            Map<String, String> credentialInfoMap = new HashMap<>();
            credentialInfoMap = mapper.readValue(credentialInfo, HashMap.class);
            for (String key : credentialInfoMap.keySet()) {
                if ("AccessKeyId".equals(key)) {
                    access_key_id = credentialInfoMap.get(key);
                } else if ("SecretAccessKey".equals(key)) {
                    secret_key_id = credentialInfoMap.get(key);
                }
            }
            // System.out.println("access_key_id = " + access_key_id + " access_key_id = " + access_key_id);

            if (null != access_key_id && null != secret_key_id) {
                credentials = new BasicAWSCredentials(access_key_id, secret_key_id);
                // s3Client = AmazonS3ClientBuilder.standard().withCredentials(new AWSStaticCredentialsProvider(awsCreds)).build();
            } else {
                throw new AmazonClientException("Cannot load the credentials from the credential information. "
                        + "Please make sure that your credentials file is at the correct location, and is in valid format.", e);
            }
        }

        ClientConfiguration config = new ClientConfiguration();
        config.setConnectionTimeout(250000);
        config.setSocketTimeout(50000);

        s3Client = new AmazonS3Client(credentials, config);
        Region usEast = Region.getRegion(Regions.US_EAST_1);
        s3Client.setRegion(usEast);

        String bookName = args[2];

        S3BookInfo bookInfo = new S3BookInfo();
        bookInfo.setBookName(bookName);
        bookInfo.setBucketSourceName(args[0]);
        bookInfo.setBucketTargetName(args[1]);
        bookInfo.setCompressionSize(15000000);

        // *** Generate metadata *****
        // S3Util.copyS3ObjectTiffMetadata(s3client, "ul-bagit", "ul-ir-workspace", "Zuniga_1591/data/004.tif", "Zuniga_1591/data/004.tif");
        // S3Util.copyS3ObjectTiffMetadata(s3client, "ul-bagit", "ul-ir-workspace", "Zuniga_1591/data/004.tif", "Zuniga_1591/data/004-20.tif");
        // S3Util.copyS3ObjectTiffMetadata(s3client, "ul-bagit", "ul-ir-workspace", "Zuniga_1591/data/004.tif", "Zuniga_1591/data/004-50.tif");

        // *** Generate small tiffs *****
        Integer threadMaxCount = 0;
        try {
            threadMaxCount = Integer.valueOf(args[3]);
        } catch (Exception ex) {
            ex.printStackTrace(); // logger.error("Cannot parse the thread count! " + ex.getMessage());
            return;
        }

        System.out.println(
                "arg0 = " + args[0] + " arg1 = " + args[1] + " arg2 = " + args[2] + " arg3 = " + args[3]);

        ExecutorService executor = Executors.newFixedThreadPool(threadMaxCount);
        List<String> tiffDiff = S3Util.getBucketObjectKeyList(bookInfo.getBucketSourceName(), args[2], s3Client);
        // .getS3BucketFolderObjDiff(s3Client, args[0], bookName + "/data", args[1], bookName + "/data");
        int diff = tiffDiff.size();
        if (diff > 0) {
            System.out.println("There are totally " + String.valueOf(diff)
                    + " tiff images to process.\nStart processing at " + (new java.util.Date()).toString());

            AwsDataProcessorThreadFactory threadFactory = new AwsDataProcessorThreadFactory();
            for (int i = 0; i <= 10; i++) {
                // S3TiffProcessorThread s3TiffProcessorThread = new S3TiffProcessorThread(s3Client, bookInfo, String.valueOf(i) + ".tif", tiffDiff);
                // threadFactory.setIndex(i);
                // threadFactory.setJobType("small-tiff-" + bookName);
                // executor.execute(threadFactory.newThread(s3TiffProcessorThread));
                // System.out.println("obj has path = " + bookInfo.getBucketSourceName() + tiffDiff.get(i));

                S3TiffMetadataProcessorThread thread = new S3TiffMetadataProcessorThread(s3Client, bookInfo,
                        String.valueOf(i) + ".tif", tiffDiff);
                threadFactory.setIndex(i);
                threadFactory.setJobType("tiff-metadata-" + bookName);
                executor.execute(threadFactory.newThread(thread));
            }
        } else {
            System.out.println("There are no tiff images to process");
        }

        executor.shutdown();
        while (!executor.isTerminated()) {
        }
        System.out.println("All the derivatives were generated at " + (new java.util.Date()).toString() + "!");
    } catch (Exception ex) {
        ex.printStackTrace(); // logger.error("Cannot finish generating the small tiff images" + ex.getMessage());
    }
}
From source file:oulib.aws.s3.S3TiffProcessor.java
/**
 * @param bookInfo : contains the information of the source bucket name, target bucket name, and the name of the book
 * @param context : lambda function runtime context
 * @return :
 */
@Override
public String handleRequest(S3BookInfo bookInfo, Context context) {
    AmazonS3 s3client = new AmazonS3Client();
    Region usEast = Region.getRegion(Regions.US_EAST_1);
    s3client.setRegion(usEast);

    try {
        String sourceBucketName = bookInfo.getBucketSourceName();
        String targetBucketName = bookInfo.getBucketTargetName();
        String bookName = bookInfo.getBookName();

        // Every book has a folder in the target bucket:
        Map targetBucketKeyMap = S3Util.getBucketObjectKeyMap(targetBucketName, bookName, s3client);
        if (!S3Util.folderExitsts(bookName, targetBucketKeyMap)) {
            S3Util.createFolder(targetBucketName, bookName, s3client);
        }

        final ListObjectsV2Request req = new ListObjectsV2Request().withBucketName(sourceBucketName)
                .withPrefix(bookName + "/data/");
        ListObjectsV2Result result;
        do {
            result = s3client.listObjectsV2(req);
            for (S3ObjectSummary objectSummary : result.getObjectSummaries()) {
                String key = objectSummary.getKey();
                if (key.endsWith(".tif") && !targetBucketKeyMap.containsKey(key + ".tif")) {
                    S3Object object = s3client.getObject(new GetObjectRequest(sourceBucketName, key));
                    System.out.println("Start to generate smaller tif image for the object " + key);
                    S3Util.generateSmallTiffWithTargetSize(s3client, object, targetBucketName,
                            bookInfo.getCompressionSize());
                    // S3Util.copyS3ObjectTiffMetadata(s3client, object, s3client.getObject(new GetObjectRequest(targetBucketName, key)), targetBucketName, key + ".tif");
                    System.out.println("Finished to generate smaller tif image for the object " + key + ".tif");
                    // break;
                }
            }
            System.out.println("Next Continuation Token : " + result.getNextContinuationToken());
            req.setContinuationToken(result.getNextContinuationToken());
        } while (result.isTruncated() == true);
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message: " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code: " + ase.getErrorCode());
        System.out.println("Error Type: " + ase.getErrorType());
        System.out.println("Request ID: " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which means the client encountered an internal error while trying to communicate with S3, \nsuch as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
    return null;
}
From source file:oulib.aws.s3.S3Util.java
/** * Use the default approach to get a AWS S3 client with the default region of east. * //w w w . j a v a 2 s . c om * @return AmazonS3 : s3 client */ public static AmazonS3 getS3AwsClient() { AWSCredentials credentials = null; try { ProfileCredentialsProvider provider = new ProfileCredentialsProvider("default"); credentials = provider.getCredentials(); if (null == credentials) { throw new InvalidS3CredentialsException("Invalid credentials with default approach!"); } } catch (InvalidS3CredentialsException | AmazonClientException e) { throw new AmazonClientException("Cannot load the credentials from the credential profiles file. " + "Please make sure that your credentials file is at the correct " + "location (/Users/zhao0677/.aws/credentials), and is in valid format.", e); } AmazonS3 s3client = new AmazonS3Client(credentials); Region usEast = Region.getRegion(Regions.US_EAST_1); s3client.setRegion(usEast); return s3client; }