List of usage examples for com.amazonaws.services.s3.AmazonS3Client: the AmazonS3Client constructor
@SdkInternalApi AmazonS3Client(AmazonS3ClientParams s3ClientParams)
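This constructor is marked @SdkInternalApi, so application code is not expected to call it directly; the examples below go through the public constructors that take credentials or a credentials provider. For comparison, a minimal sketch of the builder-based construction that later AWS SDK for Java 1.x releases recommend over the constructors (the region value is an assumption, not taken from any example below):

import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

// Minimal sketch: AmazonS3ClientBuilder replaces direct use of the
// AmazonS3Client constructors; Regions.US_EAST_1 is a placeholder.
AmazonS3 s3 = AmazonS3ClientBuilder.standard()
        .withCredentials(new DefaultAWSCredentialsProviderChain())
        .withRegion(Regions.US_EAST_1)
        .build();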
From source file: com.facebook.presto.rakam.S3BackupStore.java
License: Apache License

@Inject
public S3BackupStore(S3BackupConfig config) {
    this.config = config;
    s3Client = new AmazonS3Client(config.getCredentials());
    s3Client.setRegion(config.getAWSRegion());
    if (config.getEndpoint() != null) {
        s3Client.setEndpoint(config.getEndpoint());
    }
}
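The example above overrides both the region and, optionally, the endpoint on the constructed client. A hedged sketch of the same override expressed through the builder API, where the endpoint URL and signing region are placeholders rather than values from S3BackupConfig:

import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

// Sketch only: with the builder, a custom endpoint and its signing region are
// supplied together through EndpointConfiguration instead of setEndpoint()/setRegion().
AmazonS3 s3 = AmazonS3ClientBuilder.standard()
        .withEndpointConfiguration(
                new AwsClientBuilder.EndpointConfiguration("https://s3.example.com", "us-east-1"))
        .build();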
From source file: com.fivepebbles.ProcessAWS.java
License: MIT License

ProcessAWS(String keyId, String secKey) {
    this.myCredentials = new BasicAWSCredentials(keyId, secKey);
    this.s3client = new AmazonS3Client(myCredentials);
}
From source file: com.flipzu.PostProcThread.java
License: Apache License

private boolean uploadToS3(Broadcast bcast, boolean delete) {
    debug.logPostProc("PostProcThread, S3 upload for " + bcast.getFilename());
    if (bcast.getFilename() == null) {
        debug.logPostProc("PostProcThread, uploadToS3, filename is null");
        return false;
    }
    File file = new File(bcast.getFilename());
    if (!file.exists()) {
        debug.logPostProc("PostProcThread, uploadToS3, " + bcast.getFilename() + " does not exist");
        return false;
    }
    AmazonS3 s3 = null;
    try {
        InputStream is = new FileInputStream("aws.properties");
        s3 = new AmazonS3Client(new PropertiesCredentials(is));
    } catch (Exception e) {
        Debug.getInstance().logError("uploadToS3 Error ", e);
        return false;
    }
    String bucketName = Config.getInstance().getS3Bucket();
    String dirName = Config.getInstance().getS3dir();
    String objName = dirName + "/" + bcast.getId() + Config.getInstance().getFileWriterExtension();
    PutObjectRequest po = new PutObjectRequest(bucketName, objName, file);
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentType("audio/mpeg");
    po.setMetadata(metadata);
    po.setCannedAcl(CannedAccessControlList.PublicRead);
    try {
        s3.putObject(po);
    } catch (AmazonServiceException ase) {
        debug.logPostProc("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        debug.logPostProc("Error Message: " + ase.getMessage());
        debug.logPostProc("HTTP Status Code: " + ase.getStatusCode());
        debug.logPostProc("AWS Error Code: " + ase.getErrorCode());
        debug.logPostProc("Error Type: " + ase.getErrorType());
        debug.logPostProc("Request ID: " + ase.getRequestId());
        return false;
    } catch (AmazonClientException ace) {
        debug.logPostProc("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        debug.logPostProc("Error Message: " + ace.getMessage());
        return false;
    }
    if (delete) {
        if (Config.getInstance().deleteSmallBcasts()) {
            /* check and remove empty/short broadcasts */
            cleanCrappyBroadcasts(bcast.getKey(), file);
        }
        debug.logPostProc("uploadToS3, deleting file " + bcast.getFilename());
        file.delete();
    }
    return true;
}
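The upload method above builds its credentials from a local aws.properties file via PropertiesCredentials, which looks for the two property names accessKey and secretKey. A minimal sketch of that construction as a standalone helper (the method name and the placeholder values are hypothetical):

import java.io.FileInputStream;
import java.io.IOException;
import com.amazonaws.auth.PropertiesCredentials;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;

// Hypothetical helper: aws.properties must define exactly these two keys
// (placeholder values shown):
//   accessKey=AKIA...
//   secretKey=...
static AmazonS3 clientFromProperties() throws IOException {
    try (FileInputStream in = new FileInputStream("aws.properties")) {
        return new AmazonS3Client(new PropertiesCredentials(in));
    }
}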
From source file: com.flipzu.PostProcThread.java
License: Apache License

private void consolidateS3(Broadcast bcast) {
    debug.logPostProc("PostProcThread, consolidate S3 for " + bcast);
    File file = new File(bcast.getFilename());
    if (!file.exists()) {
        debug.logPostProc("consolidateS3, empty broadcast, doing nothing");
        return;
    }
    AmazonS3 s3 = null;
    try {
        InputStream is = new FileInputStream("aws.properties");
        s3 = new AmazonS3Client(new PropertiesCredentials(is));
    } catch (Exception e) {
        debug.logError("consolidateS3 Error ", e);
        return;
    }
    String bucketName = Config.getInstance().getS3Bucket();
    String dirName = Config.getInstance().getS3dir();
    String objName = dirName + "/" + bcast.getId() + Config.getInstance().getFileWriterExtension();
    S3Object obj = null;
    try {
        obj = s3.getObject(bucketName, objName);
    } catch (AmazonServiceException ase) {
        debug.logPostProc("consolidateS3 for " + bcast + ". File not found, doing nothing...");
        return;
    } catch (AmazonClientException ace) {
        debug.logPostProc("consolidateS3 for " + bcast + ". File not found, doing nothing...");
        return;
    }
    if (obj == null) {
        debug.logPostProc("consolidateS3 for " + bcast + ". File not found, doing nothing.");
        return;
    }
    debug.logPostProc("consolidateS3 for " + bcast + ". File found, consolidating.");
    String auxFile = Config.getInstance().getFileWriterDestDir() + "/" + bcast.getId() + "-aux"
            + Config.getInstance().getFileWriterExtension();
    BufferedOutputStream bosAux = null;
    try {
        FileOutputStream fos = new FileOutputStream(auxFile);
        bosAux = new BufferedOutputStream(fos);
    } catch (FileNotFoundException e) {
        debug.logError("consolidateS3 for, error creating output stream", e);
        return;
    }
    /* fetch file from S3 */
    BufferedInputStream is = new BufferedInputStream(obj.getObjectContent());
    int r = 0;
    do {
        byte[] b = new byte[1024];
        try {
            r = is.read(b);
            if (r > 0)
                bosAux.write(b, 0, r);
        } catch (IOException e) {
            debug.logError("consolidateS3 error", e);
            /* cleanup */
            File aFile = new File(auxFile);
            aFile.delete();
            return;
        }
    } while (r > 0);
    try {
        is.close();
    } catch (IOException e) {
        debug.logError("consolidateS3 error", e);
    }
    /* append our file to aux file */
    BufferedInputStream bis;
    try {
        FileInputStream fis = new FileInputStream(bcast.getFilename());
        bis = new BufferedInputStream(fis);
    } catch (FileNotFoundException e) {
        debug.logPostProc("consolidateS3 error, FileNotFoundException");
        return;
    }
    r = 0;
    do {
        byte[] b = new byte[1024];
        try {
            r = bis.read(b);
            // write only the bytes actually read; the original wrote the whole
            // buffer, which padded the output with stale bytes on the last chunk
            if (r > 0)
                bosAux.write(b, 0, r);
        } catch (IOException e) {
            debug.logError("consolidateS3 error", e);
            return;
        }
    } while (r > 0);
    try {
        bis.close();
        bosAux.close();
    } catch (IOException e) {
        debug.logError("consolidateS3 error", e);
    }
    /* delete old file and point the broadcast at the consolidated one */
    file.delete();
    bcast.setFilename(auxFile);
    debug.logPostProc("consolidateS3 for " + bcast + ". File consolidated in " + bcast.getFilename());
    return;
}
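The two hand-rolled copy loops in consolidateS3() can also be expressed with the IOUtils helper that ships inside the AWS SDK; a sketch under the assumption that the surrounding variables (obj, bosAux, bcast) keep the names used above, with IOException handling omitted:

import java.io.FileInputStream;
import com.amazonaws.util.IOUtils;

// Sketch: copy the S3 object body, then append the local recording,
// letting IOUtils.copy handle partial reads and buffer management.
IOUtils.copy(obj.getObjectContent(), bosAux);
try (FileInputStream fis = new FileInputStream(bcast.getFilename())) {
    IOUtils.copy(fis, bosAux);
}
bosAux.close();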
From source file: com.ge.predix.sample.blobstore.config.LocalConfig.java
License: Apache License

@Bean
public BlobstoreService objectStoreService() {
    log.info("objectStoreService(): " + objectStoreProperties.getAccessKey()
            + objectStoreProperties.getSecretKey() + ", " + objectStoreProperties.getBucket());
    AmazonS3Client s3Client = new AmazonS3Client(new BasicAWSCredentials(
            objectStoreProperties.getAccessKey(), objectStoreProperties.getSecretKey()));
    s3Client.setEndpoint(objectStoreProperties.getUrl());
    try {
        // Remove the credentials from the Object Store URL
        URL url = new URL(objectStoreProperties.getUrl());
        String urlWithoutCredentials = url.getProtocol() + "://" + url.getHost();
        // Return BlobstoreService
        return new BlobstoreService(s3Client, objectStoreProperties.getBucket(), urlWithoutCredentials);
    } catch (MalformedURLException e) {
        log.error("create(): Couldn't parse the URL provided by VCAP_SERVICES. Exception = " + e.getMessage());
        throw new RuntimeException("Blobstore URL is Invalid", e);
    }
}
From source file: com.gemmystar.api.contents.S3UploadScheduledTask.java
License: Open Source License

public void uploadToS3(File uploadedFile) {
    try {
        AmazonS3 s3client = new AmazonS3Client(new ProfileCredentialsProvider());
        LOGGER.debug("Uploading a new object to S3 from a {}", uploadedFile.getName());
        String keyName = GemmyConstant.S3_KEY_PREFIX_VIDEO + uploadedFile.getName();
        s3client.putObject(new PutObjectRequest(s3BucketName, keyName, uploadedFile));
        contentsService.saveS3Key(getContentsId(uploadedFile.getName()), keyName);
        LOGGER.debug("upload success.");
        uploadedFile.delete();
    } catch (Exception e) {
        LOGGER.error(e.toString(), e);
    }
}
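The scheduled task above relies on ProfileCredentialsProvider with no arguments, which reads the [default] profile from the shared AWS credentials file, typically ~/.aws/credentials. A minimal sketch of that file and the matching client construction (key values are placeholders):

import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;

// ~/.aws/credentials (placeholder values):
//   [default]
//   aws_access_key_id = AKIA...
//   aws_secret_access_key = ...
AmazonS3 s3client = new AmazonS3Client(new ProfileCredentialsProvider());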
From source file: com.gendevs.bedrock.appengine.service.storage.StorageProvider.java
License: Apache License

private void initalizeS3() {
    AWSCredentials credentials = new BasicAWSCredentials(StorageConstants.ACCESS_KEY,
            StorageConstants.SECRET_KEY);
    if (s3Client == null) {
        s3Client = new AmazonS3Client(credentials);
        s3Client.setRegion(region);
        s3Client.setEndpoint("s3-us-west-2.amazonaws.com");
    }
    manager = new TransferManager(credentials);
}
From source file: com.github.abhinavmishra14.aws.s3.service.impl.AwsS3IamServiceImpl.java
License: Open Source License

/**
 * Inits the service.
 */
private void initService() {
    AWSUtil.notNull(accessKey, AWSUtilConstants.ERR_MSG_ACCESSKEY);
    AWSUtil.notNull(secretKey, AWSUtilConstants.ERR_MSG_SECRETKEY);
    // Credentials object identifying the user for authentication.
    // The user must have AWSConnector and AmazonS3FullAccess for this example to work.
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("AwsS3IamService is initializing using keys..");
    }
    final AWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
    // Create a client connection based on the credentials.
    s3client = new AmazonS3Client(credentials);
}
From source file: com.github.jramos.snowplow.RedshiftSinkEmitter.java
License: Apache License

public RedshiftSinkEmitter(KinesisConnectorConfiguration kinesisConfiguration) {
    this.configuration = (RedshiftSinkConfiguration) kinesisConfiguration;
    s3Bucket = configuration.S3_BUCKET;
    s3Endpoint = configuration.S3_ENDPOINT;
    s3client = new AmazonS3Client(configuration.AWS_CREDENTIALS_PROVIDER);
    if (s3Endpoint != null) {
        s3client.setEndpoint(s3Endpoint);
    }
    redshiftTable = configuration.REDSHIFT_DATA_TABLE;
    redshiftDelimiter = configuration.REDSHIFT_DATA_DELIMITER;
    redshiftURL = configuration.REDSHIFT_URL;
    loginProperties = new Properties();
    loginProperties.setProperty("user", configuration.REDSHIFT_USERNAME);
    loginProperties.setProperty("password", configuration.REDSHIFT_PASSWORD);
    String localTempDir = System.getProperty("java.io.tmpdir") + File.separator + configuration.S3_BUCKET;
    tempDir = new File(localTempDir);
    if (!tempDir.exists()) {
        tempDir.mkdirs();
    }
    LOG.info("Local S3 temp directory " + tempDir.getAbsolutePath());
}
From source file: com.github.lbroudoux.elasticsearch.river.s3.connector.S3Connector.java
License: Apache License

/**
 * Connect to the specified bucket using the previously given access key and secret key.
 * @param bucketName Name of the bucket to connect to
 * @param pathPrefix Prefix that will be later used for filtering documents
 * @throws AmazonS3Exception when the access or secret keys are wrong or the bucket does not exist
 */
public void connectUserBucket(String bucketName, String pathPrefix) throws AmazonS3Exception {
    this.bucketName = bucketName;
    this.pathPrefix = pathPrefix;
    AWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
    s3Client = new AmazonS3Client(credentials);
    // Getting the location seems odd as we don't use it later, and doesBucketExist() seems
    // more appropriate... However, the latter returns true even for non-existing buckets!
    s3Client.getBucketLocation(bucketName);
}
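As the comment in connectUserBucket() notes, getBucketLocation() doubles as an existence and permission check. A hedged sketch of a hypothetical helper built on that behaviour, assuming the surrounding class's s3Client field: a missing bucket surfaces as an AmazonServiceException with HTTP status 404 (NoSuchBucket), while other failures are rethrown.

import com.amazonaws.AmazonServiceException;

// Hypothetical helper: treats a 404 from getBucketLocation() as "bucket does not exist".
private boolean bucketReachable(String bucketName) {
    try {
        s3Client.getBucketLocation(bucketName);
        return true;
    } catch (AmazonServiceException ase) {
        if (ase.getStatusCode() == 404) {
            return false;
        }
        throw ase;
    }
}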