List of usage examples for com.amazonaws.ClientConfiguration: the ClientConfiguration() constructor
public ClientConfiguration()
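The no-argument constructor returns a configuration populated with SDK defaults; each example below tunes it through setters or the fluent with* methods before passing it to a client constructor. A minimal sketch of the options these examples exercise (the values are illustrative, not recommendations):

import com.amazonaws.ClientConfiguration;
import com.amazonaws.Protocol;

ClientConfiguration config = new ClientConfiguration()
        .withConnectionTimeout(10 * 1000) // ms to establish a TCP connection
        .withSocketTimeout(50 * 1000)     // ms to wait for data on an open socket
        .withMaxConnections(100)          // size of the HTTP connection pool
        .withMaxErrorRetry(3);            // retries for retryable service errors
config.setProtocol(Protocol.HTTPS);       // HTTPS is the SDK default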
From source file: org.systemsbiology.common.ConfigHelper.java
License: Open Source License
public AmazonSimpleWorkflow createSWFClient() {
    ClientConfiguration config = new ClientConfiguration().withSocketTimeout(70 * 1000);
    AWSCredentials awsCredentials = new BasicAWSCredentials(this.swfAccessId, this.swfSecretKey);
    AmazonSimpleWorkflow client = new AmazonSimpleWorkflowClient(awsCredentials, config);
    client.setEndpoint(this.swfServiceUrl);
    return client;
}
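A note on the 70-second socket timeout: presumably it is set above the SDK's 50-second default because Simple Workflow polling requests can hold the connection open for up to a minute, which a shorter timeout would abort mid-poll.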
From source file: org.wallride.autoconfigure.WallRideAutoConfiguration.java
License: Apache License
@Bean
public AmazonS3 amazonS3() {
    // final String accessKey = environment.getRequiredProperty("aws.accessKey");
    // final String secretKey = environment.getRequiredProperty("aws.secretKey");
    ClientConfiguration configuration = new ClientConfiguration();
    configuration.setMaxConnections(1000);
    // return new AmazonS3Client(new BasicAWSCredentials(accessKey, secretKey), configuration);
    return new AmazonS3Client(configuration);
}
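The AmazonS3Client(ClientConfiguration) constructor used here is deprecated in later 1.x SDKs in favor of the builder (the oulib.aws.Main example below references AmazonS3ClientBuilder in a comment). A sketch of the equivalent builder call, assuming credentials come from the default provider chain:

import com.amazonaws.ClientConfiguration;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

ClientConfiguration configuration = new ClientConfiguration();
configuration.setMaxConnections(1000);
AmazonS3 amazonS3 = AmazonS3ClientBuilder.standard()
        .withClientConfiguration(configuration)
        .build();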
From source file: org.wallride.autoconfigure.WallRideInitializer.java
License: Apache License
public static ResourceLoader createResourceLoader() {
    ClientConfiguration configuration = new ClientConfiguration();
    configuration.setMaxConnections(1000);
    AmazonS3 amazonS3 = new AmazonS3Client(configuration);
    SimpleStorageResourceLoader resourceLoader = new SimpleStorageResourceLoader(amazonS3);
    try {
        resourceLoader.afterPropertiesSet();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return new PathMatchingSimpleStorageResourcePatternResolver(amazonS3, resourceLoader,
            new PathMatchingResourcePatternResolver());
}
From source file: org.xmlsh.aws.gradle.s3.AmazonS3PluginExtension.java
License: BSD License
private AmazonS3 initClient() {
    AwsPluginExtension aws = project.getExtensions().getByType(AwsPluginExtension.class);
    ClientConfiguration clientConfiguration = new ClientConfiguration();
    if (maxErrorRetry > 0)
        clientConfiguration.setMaxErrorRetry(maxErrorRetry);
    AmazonS3Client client = aws.createClient(AmazonS3Client.class, profileName, clientConfiguration);
    if (region != null) {
        client.setRegion(RegionUtils.getRegion(region));
    }
    return client;
}
From source file: oulib.aws.Main.java
public static void main(String[] args) {
    try {
        AWSCredentials credentials = null;
        AmazonS3 s3Client = null;
        // args = new String[4];
        // args[0] = "ul-bagit";
        // args[1] = "ul-ir-workspace";
        // args[2] = "Borelli_1680-1681";
        // args[3] = "6";
        try {
            credentials = new ProfileCredentialsProvider("default").getCredentials();
        } catch (Exception e) {
            // Fall back to credential information resolved by AwsUtil.
            String access_key_id = null;
            String secret_key_id = null;
            String credentialInfo = AwsUtil.getAwsCredentials();
            ObjectMapper mapper = new ObjectMapper();
            Map<String, String> credentialInfoMap = mapper.readValue(credentialInfo, HashMap.class);
            for (String key : credentialInfoMap.keySet()) {
                if ("AccessKeyId".equals(key)) {
                    access_key_id = credentialInfoMap.get(key);
                } else if ("SecretAccessKey".equals(key)) {
                    secret_key_id = credentialInfoMap.get(key);
                }
            }
            // System.out.println("access_key_id = " + access_key_id);
            if (null != access_key_id && null != secret_key_id) {
                credentials = new BasicAWSCredentials(access_key_id, secret_key_id);
                // s3Client = AmazonS3ClientBuilder.standard().withCredentials(new AWSStaticCredentialsProvider(awsCreds)).build();
            } else {
                throw new AmazonClientException("Cannot load the credentials from the credential information. "
                        + "Please make sure that your credentials file is at the correct location, "
                        + "and is in valid format.", e);
            }
        }
        ClientConfiguration config = new ClientConfiguration();
        config.setConnectionTimeout(250000); // 250 s to establish a connection
        config.setSocketTimeout(50000);      // 50 s of socket inactivity
        s3Client = new AmazonS3Client(credentials, config);
        Region usEast = Region.getRegion(Regions.US_EAST_1);
        s3Client.setRegion(usEast);

        String bookName = args[2];
        S3BookInfo bookInfo = new S3BookInfo();
        bookInfo.setBookName(bookName);
        bookInfo.setBucketSourceName(args[0]);
        bookInfo.setBucketTargetName(args[1]);
        bookInfo.setCompressionSize(15000000);

        // *** Generate metadata *****
        // S3Util.copyS3ObjectTiffMetadata(s3client, "ul-bagit", "ul-ir-workspace", "Zuniga_1591/data/004.tif", "Zuniga_1591/data/004.tif");
        // S3Util.copyS3ObjectTiffMetadata(s3client, "ul-bagit", "ul-ir-workspace", "Zuniga_1591/data/004.tif", "Zuniga_1591/data/004-20.tif");
        // S3Util.copyS3ObjectTiffMetadata(s3client, "ul-bagit", "ul-ir-workspace", "Zuniga_1591/data/004.tif", "Zuniga_1591/data/004-50.tif");

        // *** Generate small tiffs *****
        Integer threadMaxCount = 0;
        try {
            threadMaxCount = Integer.valueOf(args[3]);
        } catch (Exception ex) {
            ex.printStackTrace(); // logger.error("Cannot parse the thread count! " + ex.getMessage());
            return;
        }
        System.out.println("arg0 = " + args[0] + " arg1 = " + args[1] + " arg2 = " + args[2] + " arg3 = " + args[3]);
        ExecutorService executor = Executors.newFixedThreadPool(threadMaxCount);
        List<String> tiffDiff = S3Util.getBucketObjectKeyList(bookInfo.getBucketSourceName(), args[2], s3Client);
        // alternative: S3Util.getS3BucketFolderObjDiff(s3Client, args[0], bookName + "/data", args[1], bookName + "/data")
        int diff = tiffDiff.size();
        if (diff > 0) {
            System.out.println("There are " + String.valueOf(diff) + " tiff images in total to process.\nStart processing at "
                    + (new java.util.Date()).toString());
            AwsDataProcessorThreadFactory threadFactory = new AwsDataProcessorThreadFactory();
            for (int i = 0; i <= 10; i++) {
                // S3TiffProcessorThread s3TiffProcessorThread = new S3TiffProcessorThread(s3Client, bookInfo, String.valueOf(i) + ".tif", tiffDiff);
                // threadFactory.setIndex(i);
                // threadFactory.setJobType("small-tiff-" + bookName);
                // executor.execute(threadFactory.newThread(s3TiffProcessorThread));
                // System.out.println("obj has path = " + bookInfo.getBucketSourceName() + tiffDiff.get(i));
                S3TiffMetadataProcessorThread thread = new S3TiffMetadataProcessorThread(s3Client, bookInfo,
                        String.valueOf(i) + ".tif", tiffDiff);
                threadFactory.setIndex(i);
                threadFactory.setJobType("tiff-metadata-" + bookName);
                executor.execute(threadFactory.newThread(thread));
            }
        } else {
            System.out.println("There are no tiff images to process");
        }
        executor.shutdown();
        while (!executor.isTerminated()) {
            // busy-wait until all worker threads finish
        }
        System.out.println("All the derivatives were generated at " + (new java.util.Date()).toString() + "!");
    } catch (Exception ex) {
        ex.printStackTrace(); // logger.error("Cannot finish generating the small tiff images" + ex.getMessage());
    }
}
From source file: support.SQS.java
License: Open Source License
SQS() {
    try {
        credentials = new ProfileCredentialsProvider(key_path, "default").getCredentials();
    } catch (Exception e) {
        throw new AmazonClientException("Cannot load the credentials from the credential profiles file. "
                + "Please make sure that your credentials file is at the correct "
                + "location (C:\\Users\\Sayon\\.aws\\credentials), and is in valid format.", e);
    }
    ClientConfiguration clientConfiguration = new ClientConfiguration();
    // clientConfiguration.setProxyHost(proxy_host);
    // clientConfiguration.setProxyPort(proxy_port);
    // clientConfiguration.setProxyUsername(proxy_username);
    // clientConfiguration.setProxyPassword(proxy_password);
    sqs = new AmazonSQSClient(credentials, clientConfiguration);
    Region usWest2 = Region.getRegion(Regions.US_WEST_2);
    sqs.setRegion(usWest2);
}
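The commented-out calls above show ClientConfiguration's HTTP proxy support. A minimal sketch with hypothetical host, port, and credentials (none of these values are from the source):

ClientConfiguration clientConfiguration = new ClientConfiguration();
clientConfiguration.setProxyHost("proxy.example.com"); // hypothetical
clientConfiguration.setProxyPort(8080);                // hypothetical
clientConfiguration.setProxyUsername("proxyUser");     // only needed for authenticating proxies
clientConfiguration.setProxyPassword("proxyPass");
AmazonSQS sqs = new AmazonSQSClient(credentials, clientConfiguration);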
From source file: usingflume.ch05.S3Sink.java
License: Apache License
@Override
public void start() {
    // Set up the Amazon S3 client.
    AWSCredentials credentials = new BasicAWSCredentials(awsAccessKeyId, awsSecretKey);
    ClientConfiguration config = new ClientConfiguration();
    config.setProtocol(Protocol.HTTP); // plain HTTP; the endpoint is set explicitly below
    connection = new AmazonS3Client(credentials, config);
    connection.setEndpoint(endPoint);
    if (!connection.doesBucketExist(bucket)) {
        connection.createBucket(bucket);
    }
    super.start();
}