List of usage examples for the com.amazonaws.services.s3.AmazonS3Client no-argument constructor AmazonS3Client()
@Deprecated
public AmazonS3Client()
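This no-argument constructor is deprecated throughout the AWS SDK for Java v1; AWS recommends constructing clients through AmazonS3ClientBuilder instead. For comparison with the examples below, a minimal builder-based equivalent looks like this (the region shown is an arbitrary placeholder):

    import com.amazonaws.regions.Regions;
    import com.amazonaws.services.s3.AmazonS3;
    import com.amazonaws.services.s3.AmazonS3ClientBuilder;

    // Like new AmazonS3Client(), this resolves credentials through the
    // DefaultAWSCredentialsProviderChain (environment variables, system
    // properties, profile file, then container/instance-profile credentials).
    AmazonS3 s3 = AmazonS3ClientBuilder.standard()
            .withRegion(Regions.US_EAST_1) // placeholder region
            .build();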
From source file:com.epam.dlab.module.aws.AdapterS3File.java
License:Apache License
    /**
     * Creates and returns the Amazon S3 client, and checks that the bucket exists.
     *
     * @throws AdapterException if the bucket does not exist
     */
    private AmazonS3 getAmazonClient() throws AdapterException {
        // Fall back to the default credential chain when no access key is configured
        AmazonS3 s3 = (accessKeyId == null
                ? new AmazonS3Client()
                : new AmazonS3Client(new BasicAWSCredentials(accessKeyId, secretAccessKey)));
        if (!s3.doesBucketExist(bucket)) {
            throw new AdapterException("Bucket \"" + bucket + "\" does not exist.");
        }
        return s3;
    }
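Note that doesBucketExist(String) is itself deprecated in later 1.11.x releases of the v1 SDK in favor of doesBucketExistV2(String). A minimal sketch of the same logic against the builder API, reusing the fields from the example above:

    private AmazonS3 getAmazonClient() throws AdapterException {
        AmazonS3 s3 = (accessKeyId == null)
                ? AmazonS3ClientBuilder.defaultClient()
                : AmazonS3ClientBuilder.standard()
                        .withCredentials(new AWSStaticCredentialsProvider(
                                new BasicAWSCredentials(accessKeyId, secretAccessKey)))
                        .build();
        if (!s3.doesBucketExistV2(bucket)) {
            throw new AdapterException("Bucket \"" + bucket + "\" does not exist.");
        }
        return s3;
    }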
From source file:com.facebook.presto.kinesis.util.KinesisTestClientManager.java
License:Apache License
    public KinesisTestClientManager() {
        this.dynamoDBClient = new AmazonDynamoDBClient();
        this.amazonS3Client = new AmazonS3Client();
    }
From source file:com.github.abhinavmishra14.aws.s3.service.impl.AwsS3IamServiceImpl.java
License:Open Source License
    /**
     * Instantiates a new AwsS3IamServiceImpl.
     */
    public AwsS3IamServiceImpl() {
        super();
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("AwsS3IamService is initializing using IAM Role..");
        }
        // Create a client connection based on the IAM role assigned
        s3client = new AmazonS3Client();
    }
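Strictly speaking, the no-argument constructor resolves credentials through the whole default provider chain, of which the instance-profile (IAM role) provider is only the final step. A sketch that requests IAM role credentials explicitly, assuming the s3client field is typed as the AmazonS3 interface:

    // Explicitly use instance-profile (IAM role) credentials rather than
    // relying on the full default chain:
    s3client = AmazonS3ClientBuilder.standard()
            .withCredentials(InstanceProfileCredentialsProvider.getInstance())
            .build();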
From source file:com.indeed.imhotep.iql.cache.S3QueryCache.java
License:Apache License
    public S3QueryCache(PropertyResolver props) {
        enabled = true;
        try {
            bucket = props.getProperty("query.cache.s3.bucket", String.class);
            client = new AmazonS3Client();
            boolean exists = client.doesBucketExist(bucket);
            if (!exists) {
                String awsRegion = props.getProperty("aws.s3.region", String.class,
                        Region.US_Standard.toString());
                client.createBucket(bucket, awsRegion);
            }
        } catch (Exception e) {
            log.info("Failed to initialize the S3 client. Caching disabled.", e);
            enabled = false;
        }
    }
From source file:com.mycompany.mytubeaws.DownloadServlet.java
    @Override
    public void init() throws ServletException {
        s3 = new AmazonS3Client();
    }
From source file:com.netflix.bdp.s3.S3MultipartOutputCommitter.java
License:Apache License
    /**
     * Returns an {@link AmazonS3} client. This should be overridden by
     * subclasses to provide access to a configured client.
     *
     * @param path the output S3 path (with bucket)
     * @param conf a Hadoop {@link Configuration}
     * @return an {@link AmazonS3} client
     */
    protected Object findClient(Path path, Configuration conf) {
        return new AmazonS3Client();
    }
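The javadoc invites subclasses to supply a configured client. A minimal sketch of such an override; the configuration key "fs.s3.region" is hypothetical and used only for illustration:

    @Override
    protected Object findClient(Path path, Configuration conf) {
        // Read the region from the Hadoop configuration (hypothetical key),
        // defaulting to us-east-1, and build a configured client.
        return AmazonS3ClientBuilder.standard()
                .withRegion(conf.get("fs.s3.region", "us-east-1"))
                .build();
    }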
From source file:com.nextdoor.bender.ipc.s3.S3TransportFactory.java
License:Apache License
    @Override
    public void setConf(AbstractConfig config) {
        this.config = (S3TransportConfig) config;
        this.client = new AmazonS3Client();
        if (this.config.getRegion() != null) {
            this.client.withRegion(this.config.getRegion());
        }
    }
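Calling withRegion on an already-constructed client belongs to the deprecated fluent API. A builder-based sketch of the same conditional region handling, assuming getRegion() returns a value that AmazonS3ClientBuilder.withRegion accepts and that the client field is typed as the AmazonS3 interface:

    AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard();
    if (this.config.getRegion() != null) {
        builder.withRegion(this.config.getRegion());
    }
    this.client = builder.build();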
From source file:com.nextdoor.bender.S3SnsNotifier.java
License:Apache License
    public static void main(String[] args) throws ParseException, InterruptedException, IOException {
        formatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'").withZoneUTC();

        /*
         * Parse cli arguments
         */
        Options options = new Options();
        options.addOption(Option.builder().longOpt("bucket").hasArg().required()
                .desc("Name of S3 bucket to list s3 objects from").build());
        options.addOption(Option.builder().longOpt("key-file").hasArg().required()
                .desc("Local file of S3 keys to process").build());
        options.addOption(Option.builder().longOpt("sns-arn").hasArg().required()
                .desc("SNS arn to publish to").build());
        options.addOption(Option.builder().longOpt("throttle-ms").hasArg()
                .desc("Amount of ms to wait between publishing to SNS").build());
        options.addOption(Option.builder().longOpt("processed-file").hasArg()
                .desc("Local file to use to store processed S3 object names").build());
        options.addOption(Option.builder().longOpt("skip-processed").hasArg(false)
                .desc("Whether to skip S3 objects that have been processed").build());
        options.addOption(Option.builder().longOpt("dry-run").hasArg(false)
                .desc("If set do not publish to SNS").build());

        CommandLineParser parser = new DefaultParser();
        CommandLine cmd = parser.parse(options, args);

        String bucket = cmd.getOptionValue("bucket");
        String keyFile = cmd.getOptionValue("key-file");
        String snsArn = cmd.getOptionValue("sns-arn");
        String processedFile = cmd.getOptionValue("processed-file", null);
        boolean skipProcessed = cmd.hasOption("skip-processed");
        dryRun = cmd.hasOption("dry-run");
        long throttle = Long.parseLong(cmd.getOptionValue("throttle-ms", "-1"));

        if (processedFile != null) {
            File file = new File(processedFile);
            if (!file.exists()) {
                logger.debug("creating local file to store processed s3 object names: " + processedFile);
                file.createNewFile();
            }
        }

        /*
         * Import S3 keys that have been processed
         */
        if (skipProcessed && processedFile != null) {
            try (BufferedReader br = new BufferedReader(new FileReader(processedFile))) {
                String line;
                while ((line = br.readLine()) != null) {
                    alreadyPublished.add(line.trim());
                }
            }
        }

        /*
         * Setup writer for file containing processed S3 keys
         */
        FileWriter fw = null;
        BufferedWriter bw = null;
        if (processedFile != null) {
            fw = new FileWriter(processedFile, true);
            bw = new BufferedWriter(fw);
        }

        /*
         * Create clients
         */
        AmazonS3Client s3Client = new AmazonS3Client();
        AmazonSNSClient snsClient = new AmazonSNSClient();

        /*
         * Get S3 object list
         */
        try (BufferedReader br = new BufferedReader(new FileReader(keyFile))) {
            String line;
            while ((line = br.readLine()) != null) {
                String key = line.trim();

                if (alreadyPublished.contains(key)) {
                    logger.info("skipping " + key);
                    continue; // skip keys that were already published
                }

                ObjectMetadata om = s3Client.getObjectMetadata(bucket, key);
                S3EventNotification s3Notification = getS3Notification(key, bucket, om.getContentLength());
                String json = s3Notification.toJson();

                /*
                 * Publish to SNS
                 */
                if (publish(snsArn, json, snsClient, key) && processedFile != null) {
                    bw.write(key + "\n");
                    bw.flush();
                }

                if (throttle != -1) {
                    Thread.sleep(throttle);
                }
            }
        }

        if (processedFile != null) {
            bw.close();
            fw.close();
        }
    }
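A typical invocation, based on the options defined above (the jar name, bucket, and topic ARN are placeholders):

    java -cp bender-tools.jar com.nextdoor.bender.S3SnsNotifier \
        --bucket my-bucket --key-file keys.txt \
        --sns-arn arn:aws:sns:us-east-1:123456789012:my-topic \
        --throttle-ms 100 --processed-file processed.txt --skip-processed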
From source file:com.nike.cerberus.module.CerberusModule.java
License:Apache License
    private Optional<String> findBucket(final String environmentName) {
        AmazonS3Client s3Client = new AmazonS3Client();
        List<Bucket> buckets = s3Client.listBuckets();
        String envBucket = null;
        for (final Bucket bucket : buckets) {
            if (StringUtils.contains(bucket.getName(), ConfigConstants.CONFIG_BUCKET_KEY)) {
                String[] parts = bucket.getName().split("-");
                if (StringUtils.equalsIgnoreCase(environmentName, parts[0])) {
                    envBucket = bucket.getName();
                    break;
                }
            }
        }
        return Optional.ofNullable(envBucket);
    }
From source file:com.simple.sftpfetch.App.java
License:Apache License
    private static S3 s3FromProperties(Properties properties) {
        String s3Bucket = properties.getProperty("s3.bucket");
        final String awsAccessKey = properties.getProperty("s3.access.key", "");
        final String awsSecretKey = properties.getProperty("s3.secret.key", "");
        AmazonS3Client client;
        if (awsAccessKey.isEmpty() || awsSecretKey.isEmpty()) {
            client = new AmazonS3Client();
        } else {
            client = new AmazonS3Client(new SuppliedAWSCredentials(awsAccessKey, awsSecretKey));
        }
        return new S3(client, s3Bucket);
    }