List of usage examples for com.amazonaws.services.s3.model.ObjectMetadata.addUserMetadata
public void addUserMetadata(String key, String value)
Adds a key/value pair of custom user metadata for the associated object.
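Before the examples from real projects below, here is a minimal, self-contained sketch of the call in isolation (the bucket name, key, and file path are hypothetical):

import java.io.File;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;

public class AddUserMetadataExample {
    public static void main(String[] args) {
        // Uses the default credential and region provider chains
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();

        ObjectMetadata metadata = new ObjectMetadata();
        // Stored by S3 as the response header x-amz-meta-uploaded-by
        metadata.addUserMetadata("uploaded-by", "example-user");
        metadata.addUserMetadata("source-system", "example-app");

        // Bucket, key, and local file path are hypothetical
        PutObjectRequest request = new PutObjectRequest("my-bucket", "docs/report.pdf",
                new File("/tmp/report.pdf"));
        request.setMetadata(metadata);
        s3.putObject(request);
    }
}

S3 stores each pair as an x-amz-meta-* header on the object, so keys are treated case-insensitively and count against the object metadata size limit.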
From source file:com.netflix.hollow.example.producer.infrastructure.S3Publisher.java
License:Apache License
@Override
public void publishReverseDelta(File reverseDeltaFile, long previousVersion, long currentVersion) {
    String objectName = getS3ObjectName(blobNamespace, "reversedelta", currentVersion);
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.addUserMetadata("from_state", String.valueOf(currentVersion));
    metadata.addUserMetadata("to_state", String.valueOf(previousVersion));
    metadata.setHeader("Content-Length", reverseDeltaFile.length());
    uploadFile(reverseDeltaFile, objectName, metadata);
}
From source file:com.openkm.util.backup.RepositoryS3Backup.java
License:Open Source License
/**
 * Performs a recursive repository content export with metadata
 */
private static ImpExpStats backupHelper(String token, String fldPath, AmazonS3 s3, String bucket,
        boolean metadata, Writer out, InfoDecorator deco) throws FileNotFoundException,
        PathNotFoundException, AccessDeniedException, ParseException, NoSuchGroupException,
        RepositoryException, IOException, DatabaseException {
    log.info("backup({}, {}, {}, {}, {}, {})", new Object[] { token, fldPath, bucket, metadata, out, deco });
    ImpExpStats stats = new ImpExpStats();
    DocumentModule dm = ModuleManager.getDocumentModule();
    FolderModule fm = ModuleManager.getFolderModule();
    MetadataAdapter ma = MetadataAdapter.getInstance(token);
    Gson gson = new Gson();

    for (Iterator<Document> it = dm.getChildren(token, fldPath).iterator(); it.hasNext();) {
        File tmpDoc = null;
        InputStream is = null;
        FileOutputStream fos = null;
        boolean upload = true;

        try {
            Document docChild = it.next();
            String path = docChild.getPath().substring(1);
            ObjectMetadata objMeta = new ObjectMetadata();

            if (Config.REPOSITORY_CONTENT_CHECKSUM) {
                if (exists(s3, bucket, path)) {
                    objMeta = s3.getObjectMetadata(bucket, path);

                    if (docChild.getActualVersion().getChecksum().equals(objMeta.getETag())) {
                        upload = false;
                    }
                }
            }

            if (upload) {
                tmpDoc = FileUtils.createTempFileFromMime(docChild.getMimeType());
                fos = new FileOutputStream(tmpDoc);
                is = dm.getContent(token, docChild.getPath(), false);
                IOUtils.copy(is, fos);
                PutObjectRequest request = new PutObjectRequest(bucket, path, tmpDoc);

                if (metadata) {
                    // Metadata
                    DocumentMetadata dmd = ma.getMetadata(docChild);
                    String json = gson.toJson(dmd);
                    objMeta.addUserMetadata("okm", json);
                }

                request.setMetadata(objMeta);
                s3.putObject(request);
                out.write(deco.print(docChild.getPath(), docChild.getActualVersion().getSize(), null));
                out.flush();
            } else {
                if (metadata) {
                    // Metadata
                    DocumentMetadata dmd = ma.getMetadata(docChild);
                    String json = gson.toJson(dmd);
                    objMeta.addUserMetadata("okm", json);

                    // Update object metadata
                    CopyObjectRequest copyObjReq = new CopyObjectRequest(bucket, path, bucket, path);
                    copyObjReq.setNewObjectMetadata(objMeta);
                    s3.copyObject(copyObjReq);
                }

                log.info("Don't need to upload document {}", docChild.getPath());
            }

            // Stats
            stats.setSize(stats.getSize() + docChild.getActualVersion().getSize());
            stats.setDocuments(stats.getDocuments() + 1);
        } finally {
            IOUtils.closeQuietly(is);
            IOUtils.closeQuietly(fos);
            FileUtils.deleteQuietly(tmpDoc);
        }
    }

    for (Iterator<Folder> it = fm.getChildren(token, fldPath).iterator(); it.hasNext();) {
        InputStream is = null;

        try {
            Folder fldChild = it.next();
            String path = fldChild.getPath().substring(1) + "/";
            is = new ByteArrayInputStream(new byte[0]);
            ObjectMetadata objMeta = new ObjectMetadata();
            objMeta.setContentLength(0);
            PutObjectRequest request = new PutObjectRequest(bucket, path, is, objMeta);

            // Metadata
            if (metadata) {
                FolderMetadata fmd = ma.getMetadata(fldChild);
                String json = gson.toJson(fmd);
                objMeta.addUserMetadata("okm", json);
            }

            request.setMetadata(objMeta);
            s3.putObject(request);
            ImpExpStats tmp = backupHelper(token, fldChild.getPath(), s3, bucket, metadata, out, deco);

            // Stats
            stats.setSize(stats.getSize() + tmp.getSize());
            stats.setDocuments(stats.getDocuments() + tmp.getDocuments());
            stats.setFolders(stats.getFolders() + tmp.getFolders() + 1);
            stats.setOk(stats.isOk() && tmp.isOk());
        } finally {
            IOUtils.closeQuietly(is);
        }
    }

    log.debug("backupHelper: {}", stats);
    return stats;
}
From source file:com.shareplaylearn.models.UserItemManager.java
License:Open Source License
/**
 * Writes items to S3, and item metadata to Redis
 */
private void saveItem(String name, byte[] itemData, String contentType,
        ItemSchema.PresentationType presentationType) throws InternalErrorException {
    String itemLocation = this.getItemLocation(name, contentType, presentationType);
    AmazonS3Client s3Client = new AmazonS3Client(
            new BasicAWSCredentials(SecretsService.amazonClientId, SecretsService.amazonClientSecret));
    ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(itemData);
    ObjectMetadata metadata = this.makeBasicMetadata(itemData.length, false, name);
    metadata.addUserMetadata(UploadMetadataFields.CONTENT_TYPE, contentType);
    //TODO: save this metadata, along with location, to local Redis
    s3Client.putObject(ItemSchema.S3_BUCKET, itemLocation, byteArrayInputStream, metadata);
}
From source file:com.shareplaylearn.models.UserItemManager.java
License:Open Source License
private ObjectMetadata makeBasicMetadata(int bufferLength, boolean isPublic, String itemName) {
    ObjectMetadata fileMetadata = new ObjectMetadata();
    fileMetadata.setContentEncoding(MediaType.APPLICATION_OCTET_STREAM);

    if (isPublic) {
        fileMetadata.addUserMetadata(UploadMetadataFields.PUBLIC, UploadMetadataFields.TRUE_VALUE);
    } else {
        fileMetadata.addUserMetadata(UploadMetadataFields.PUBLIC, UploadMetadataFields.FALSE_VALUE);
    }

    fileMetadata.addUserMetadata(UploadMetadataFields.DISPLAY_NAME, itemName);
    fileMetadata.setContentLength(bufferLength);
    return fileMetadata;
}
From source file:com.streamsets.datacollector.bundles.SupportBundleManager.java
License:Apache License
/**
 * Instead of providing the support bundle directly to the user, upload it to StreamSets backend services.
 */
public void uploadNewBundleFromInstances(List<BundleContentGenerator> generators, BundleType bundleType)
        throws IOException {
    // Generate bundle
    SupportBundle bundle = generateNewBundleFromInstances(generators, bundleType);

    boolean enabled = configuration.get(Constants.UPLOAD_ENABLED, Constants.DEFAULT_UPLOAD_ENABLED);
    String accessKey = configuration.get(Constants.UPLOAD_ACCESS, Constants.DEFAULT_UPLOAD_ACCESS);
    String secretKey = configuration.get(Constants.UPLOAD_SECRET, Constants.DEFAULT_UPLOAD_SECRET);
    String bucket = configuration.get(Constants.UPLOAD_BUCKET, Constants.DEFAULT_UPLOAD_BUCKET);
    int bufferSize = configuration.get(Constants.UPLOAD_BUFFER_SIZE, Constants.DEFAULT_UPLOAD_BUFFER_SIZE);

    if (!enabled) {
        throw new IOException("Uploading support bundles was disabled by administrator.");
    }

    AWSCredentialsProvider credentialsProvider = new StaticCredentialsProvider(
            new BasicAWSCredentials(accessKey, secretKey));
    AmazonS3Client s3Client = new AmazonS3Client(credentialsProvider, new ClientConfiguration());
    s3Client.setS3ClientOptions(new S3ClientOptions().withPathStyleAccess(true));
    s3Client.setRegion(Region.getRegion(Regions.US_WEST_2));

    // Object Metadata
    ObjectMetadata s3Metadata = new ObjectMetadata();
    for (Map.Entry<Object, Object> entry : getMetadata(bundleType).entrySet()) {
        s3Metadata.addUserMetadata((String) entry.getKey(), (String) entry.getValue());
    }

    List<PartETag> partETags;
    InitiateMultipartUploadResult initResponse = null;

    try {
        // Uploading part by part
        LOG.info("Initiating multi-part support bundle upload");
        partETags = new ArrayList<>();
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(bucket,
                bundle.getBundleKey());
        initRequest.setObjectMetadata(s3Metadata);
        initResponse = s3Client.initiateMultipartUpload(initRequest);
    } catch (AmazonClientException e) {
        LOG.error("Support bundle upload failed: ", e);
        throw new IOException("Support bundle upload failed", e);
    }

    try {
        byte[] buffer = new byte[bufferSize];
        int partId = 1;
        int size = -1;
        while ((size = readFully(bundle.getInputStream(), buffer)) != -1) {
            LOG.debug("Uploading part {} of size {}", partId, size);
            UploadPartRequest uploadRequest = new UploadPartRequest().withBucketName(bucket)
                    .withKey(bundle.getBundleKey()).withUploadId(initResponse.getUploadId())
                    .withPartNumber(partId++).withInputStream(new ByteArrayInputStream(buffer))
                    .withPartSize(size);
            partETags.add(s3Client.uploadPart(uploadRequest).getPartETag());
        }

        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest(bucket,
                bundle.getBundleKey(), initResponse.getUploadId(), partETags);
        s3Client.completeMultipartUpload(compRequest);
        LOG.info("Support bundle upload finished");
    } catch (Exception e) {
        LOG.error("Support bundle upload failed", e);
        s3Client.abortMultipartUpload(
                new AbortMultipartUploadRequest(bucket, bundle.getBundleKey(), initResponse.getUploadId()));
        throw new IOException("Can't upload support bundle", e);
    } finally {
        // Close the client
        s3Client.shutdown();
    }
}
From source file:jenkins.plugins.itemstorage.s3.S3BaseUploadCallable.java
License:Open Source License
protected ObjectMetadata buildMetadata(File file) throws IOException {
    final ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentType(Mimetypes.getInstance().getMimetype(file.getName()));
    metadata.setContentLength(file.length());
    metadata.setLastModified(new Date(file.lastModified()));

    if (storageClass != null && !storageClass.isEmpty()) {
        metadata.setHeader("x-amz-storage-class", storageClass);
    }

    if (useServerSideEncryption) {
        metadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
    }

    for (Map.Entry<String, String> entry : userMetadata.entrySet()) {
        final String key = entry.getKey().toLowerCase();

        switch (key) {
        case "cache-control":
            metadata.setCacheControl(entry.getValue());
            break;
        case "expires":
            try {
                final Date expires = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z")
                        .parse(entry.getValue());
                metadata.setHttpExpiresDate(expires);
            } catch (ParseException e) {
                metadata.addUserMetadata(entry.getKey(), entry.getValue());
            }
            break;
        case "content-encoding":
            metadata.setContentEncoding(entry.getValue());
            break;
        case "content-type":
            metadata.setContentType(entry.getValue());
            // note: no break here, so execution falls through and the
            // content-type value is also recorded as user metadata
        default:
            metadata.addUserMetadata(entry.getKey(), entry.getValue());
            break;
        }
    }
    return metadata;
}
From source file:org.anhonesteffort.p25.wav.WaveFileS3Sender.java
License:Open Source License
private ObjectMetadata metadata(long length) {
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(length);
    metadata.addUserMetadata(METADATA_CHANNEL_ID, proto.toString(channelId));
    metadata.addUserMetadata(METADATA_TERMINATED, wasTerminated().toString());
    metadata.addUserMetadata(METADATA_START_TIME, getStartTime().toString());
    metadata.addUserMetadata(METADATA_END_TIME, getEndTime().toString());
    metadata.addUserMetadata(METADATA_LATITUDE, getLatitude().toString());
    metadata.addUserMetadata(METADATA_LONGITUDE, getLongitude().toString());
    return metadata;
}
From source file:org.apache.streams.s3.S3OutputStreamWrapper.java
License:Apache License
private void addFile() throws Exception {
    InputStream is = new ByteArrayInputStream(this.outputStream.toByteArray());
    int contentLength = outputStream.size();

    TransferManager transferManager = new TransferManager(amazonS3Client);
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setExpirationTime(DateTime.now().plusDays(365 * 3).toDate());
    metadata.setContentLength(contentLength);

    metadata.addUserMetadata("writer", "org.apache.streams");
    for (String s : metaData.keySet())
        metadata.addUserMetadata(s, metaData.get(s));

    String fileNameToWrite = path + fileName;
    Upload upload = transferManager.upload(bucketName, fileNameToWrite, is, metadata);
    try {
        upload.waitForUploadResult();
        is.close();
        transferManager.shutdownNow(false);
        LOGGER.info("S3 File Close[{} kb] - {}", contentLength / 1024, path + fileName);
    } catch (Exception e) {
        // No Op
    }
}
From source file:org.apereo.portal.portlets.dynamicskin.storage.s3.AwsS3DynamicSkinService.java
License:Apache License
private void addUserMetatadata(final ObjectMetadata metadata) {
    if (this.awsObjectUserMetadata != null) {
        for (Entry<String, String> entry : this.awsObjectUserMetadata.entrySet()) {
            metadata.addUserMetadata(entry.getKey(), entry.getValue());
        }
    }
}
From source file:org.apereo.portal.portlets.dynamicskin.storage.s3.AwsS3DynamicSkinService.java
License:Apache License
private void addDynamicSkinMetadata(final ObjectMetadata metadata, final DynamicSkinInstanceData data) {
    metadata.addUserMetadata(SKIN_UNIQUE_TOKEN_METADATA_KEY, this.getUniqueToken(data));
}
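All of the examples above attach user metadata at upload time. As a complement, here is a minimal sketch of reading the same metadata back with the v1 SDK (bucket and key are hypothetical):

import java.util.Map;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ObjectMetadata;

public class ReadUserMetadataExample {
    public static void main(String[] args) {
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();

        // Issues a HEAD request; no object content is downloaded
        ObjectMetadata metadata = s3.getObjectMetadata("my-bucket", "docs/report.pdf");

        // Keys are returned without the x-amz-meta- prefix and lowercased
        Map<String, String> userMetadata = metadata.getUserMetadata();
        for (Map.Entry<String, String> entry : userMetadata.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }
    }
}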