Example usage for com.amazonaws.services.s3.model ObjectMetadata ObjectMetadata

List of usage examples for com.amazonaws.services.s3.model ObjectMetadata ObjectMetadata

Introduction

On this page you can find example usage for the com.amazonaws.services.s3.model ObjectMetadata ObjectMetadata() constructor.

Prototype

public ObjectMetadata() 

Source Link

Usage
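The examples below come from real projects. As a quick orientation, here is a minimal sketch of the common pattern they share: construct an empty ObjectMetadata, describe the payload (at least its length), and pass it to putObject when uploading from a stream. The bucket name, key, and client setup are placeholder assumptions, not taken from any of the source files.

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ObjectMetadata;

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

public class ObjectMetadataSketch {
    public static void main(String[] args) {
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        byte[] bytes = "hello".getBytes(StandardCharsets.UTF_8);

        // Start from an empty ObjectMetadata and fill in what S3 should know about the payload.
        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(bytes.length); // without this the SDK buffers the stream to compute the length
        metadata.setContentType("text/plain");   // optional; stored as the object's Content-Type

        // "example-bucket" and "example-key" are placeholder values.
        s3.putObject("example-bucket", "example-key", new ByteArrayInputStream(bytes), metadata);
    }
}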

From source file:com.netflix.spinnaker.front50.model.S3StorageService.java

License:Apache License

@Override
public <T extends Timestamped> void storeObject(ObjectType objectType, String objectKey, T item) {
    if (readOnlyMode) {
        throw new ReadOnlyModeException();
    }
    try {
        item.setLastModifiedBy(AuthenticatedRequest.getSpinnakerUser().orElse("anonymous"));
        byte[] bytes = objectMapper.writeValueAsBytes(item);

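        // Setting the length and Content-MD5 up front lets the SDK stream the bytes without buffering and lets S3 verify the upload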
        ObjectMetadata objectMetadata = new ObjectMetadata();
        objectMetadata.setContentLength(bytes.length);
        objectMetadata.setContentMD5(
                new String(org.apache.commons.codec.binary.Base64.encodeBase64(DigestUtils.md5(bytes))));

        amazonS3.putObject(bucket, buildS3Key(objectType.group, objectKey, objectType.defaultMetadataFilename),
                new ByteArrayInputStream(bytes), objectMetadata);
        writeLastModified(objectType.group);
    } catch (JsonProcessingException e) {
        throw new IllegalStateException(e);
    }
}

From source file:com.netflix.spinnaker.front50.model.S3StorageService.java

License:Apache License

private void writeLastModified(String group) {
    if (readOnlyMode) {
        throw new ReadOnlyModeException();
    }
    try {
        byte[] bytes = objectMapper
                .writeValueAsBytes(Collections.singletonMap("lastModified", System.currentTimeMillis()));

        ObjectMetadata objectMetadata = new ObjectMetadata();
        objectMetadata.setContentLength(bytes.length);
        objectMetadata.setContentMD5(
                new String(org.apache.commons.codec.binary.Base64.encodeBase64(DigestUtils.md5(bytes))));

        amazonS3.putObject(bucket, buildTypedFolder(rootFolder, group) + "/last-modified.json",
                new ByteArrayInputStream(bytes), objectMetadata);
    } catch (JsonProcessingException e) {
        throw new IllegalStateException(e);
    }
}

From source file:com.netflix.spinnaker.front50.model.S3Support.java

License:Apache License

public void update(String id, T item) {
    try {
        byte[] bytes = objectMapper.writeValueAsBytes(item);

        ObjectMetadata objectMetadata = new ObjectMetadata();
        objectMetadata.setContentLength(bytes.length);
        objectMetadata.setContentMD5(
                new String(org.apache.commons.codec.binary.Base64.encodeBase64(DigestUtils.md5(bytes))));

        amazonS3.putObject(bucket, buildS3Key(id), new ByteArrayInputStream(bytes), objectMetadata);
        writeLastModified();
    } catch (JsonProcessingException e) {
        throw new IllegalStateException(e);
    }
}

From source file:com.netflix.spinnaker.front50.model.S3Support.java

License:Apache License

private void writeLastModified() {
    try {
        byte[] bytes = objectMapper
                .writeValueAsBytes(Collections.singletonMap("lastModified", System.currentTimeMillis()));

        ObjectMetadata objectMetadata = new ObjectMetadata();
        objectMetadata.setContentLength(bytes.length);
        objectMetadata.setContentMD5(
                new String(org.apache.commons.codec.binary.Base64.encodeBase64(DigestUtils.md5(bytes))));

        amazonS3.putObject(bucket, rootFolder + "last-modified.json", new ByteArrayInputStream(bytes),
                objectMetadata);
    } catch (JsonProcessingException e) {
        throw new IllegalStateException(e);
    }
}

From source file:com.neu.Spark.MainFrame.java

private void btnReviewSentimentActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnReviewSentimentActionPerformed
    // TODO add your handling code here:
    conn.deleteObject("spark-sentimentanalysis", "file.txt");
    try {
        Thread.sleep(15000);
    } catch (InterruptedException ex) {
        Logger.getLogger(MainFrame.class.getName()).log(Level.SEVERE, null, ex);
    }
    for (int i = 0; i < 10; i++) {
        String review = reviewText.getText();
        ByteArrayInputStream input = new ByteArrayInputStream(review.getBytes());
        conn.putObject("spark-sentimentanalysis", "file.txt", input, new ObjectMetadata());
    }
}

From source file:com.nextdoor.bender.ipc.s3.S3Transport.java

License:Apache License

protected void sendStream(InputStream input, String key, long streamSize) throws TransportException {
    /*
     * Create metadata
     */
    ObjectMetadata metadata = new ObjectMetadata();

    /*
     * Find if a multipart upload has already begun or start a new one.
     */
    MultiPartUpload upload;

    synchronized (multiPartUploads) {
        if (!multiPartUploads.containsKey(key)) {
            InitiateMultipartUploadRequest uploadRequest = new InitiateMultipartUploadRequest(bucketName, key);
            uploadRequest.setObjectMetadata(metadata);

            InitiateMultipartUploadResult res = client.initiateMultipartUpload(uploadRequest);
            upload = new MultiPartUpload(bucketName, key, res.getUploadId());
            multiPartUploads.put(key, upload);
        } else {
            upload = multiPartUploads.get(key);
        }
    }

    /*
     * Write out to S3. Note that the S3 client auto closes the input stream.
     */
    UploadPartRequest req = upload.getUploadPartRequest().withInputStream(input).withPartSize(streamSize);

    try {
        UploadPartResult res = client.uploadPart(req);
        upload.addPartETag(res.getPartETag());
    } catch (AmazonClientException e) {
        client.abortMultipartUpload(upload.getAbortMultipartUploadRequest());
        throw new TransportException("unable to put file" + e, e);
    } finally {
        try {
            input.close();
        } catch (IOException e) {
            logger.warn("error encountered while closing input stream", e);
        }
    }
}

From source file:com.nike.cerberus.service.S3StoreService.java

License:Apache License

public void put(String path, String value) {
    byte[] content;
    try {
        content = value.getBytes(ConfigConstants.DEFAULT_ENCODING);
    } catch (UnsupportedEncodingException e) {
        throw new UnexpectedDataEncodingException("Value to be stored has unexpected encoding.", e);
    }
    ByteArrayInputStream contentAsStream = new ByteArrayInputStream(content);
    ObjectMetadata md = new ObjectMetadata();
    md.setContentLength(content.length);
    s3Client.putObject(s3Bucket, getFullPath(path), contentAsStream, md);
}

From source file:com.openkm.util.backup.RepositoryS3Backup.java

License:Open Source License

/**
 * Performs a recursive repository content export with metadata
 */
private static ImpExpStats backupHelper(String token, String fldPath, AmazonS3 s3, String bucket,
        boolean metadata, Writer out, InfoDecorator deco)
        throws FileNotFoundException, PathNotFoundException, AccessDeniedException, ParseException,
        NoSuchGroupException, RepositoryException, IOException, DatabaseException {
    log.info("backup({}, {}, {}, {}, {}, {})", new Object[] { token, fldPath, bucket, metadata, out, deco });
    ImpExpStats stats = new ImpExpStats();
    DocumentModule dm = ModuleManager.getDocumentModule();
    FolderModule fm = ModuleManager.getFolderModule();
    MetadataAdapter ma = MetadataAdapter.getInstance(token);
    Gson gson = new Gson();

    for (Iterator<Document> it = dm.getChildren(token, fldPath).iterator(); it.hasNext();) {
        File tmpDoc = null;
        InputStream is = null;
        FileOutputStream fos = null;
        boolean upload = true;

        try {
            Document docChild = it.next();
            String path = docChild.getPath().substring(1);
            ObjectMetadata objMeta = new ObjectMetadata();

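            // When checksum verification is enabled, skip re-uploading documents whose checksum already matches the stored object's ETag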
            if (Config.REPOSITORY_CONTENT_CHECKSUM) {
                if (exists(s3, bucket, path)) {
                    objMeta = s3.getObjectMetadata(bucket, path);

                    if (docChild.getActualVersion().getChecksum().equals(objMeta.getETag())) {
                        upload = false;
                    }
                }
            }

            if (upload) {
                tmpDoc = FileUtils.createTempFileFromMime(docChild.getMimeType());
                fos = new FileOutputStream(tmpDoc);
                is = dm.getContent(token, docChild.getPath(), false);
                IOUtils.copy(is, fos);
                PutObjectRequest request = new PutObjectRequest(bucket, path, tmpDoc);

                if (metadata) {
                    // Metadata
                    DocumentMetadata dmd = ma.getMetadata(docChild);
                    String json = gson.toJson(dmd);
                    objMeta.addUserMetadata("okm", json);
                }

                request.setMetadata(objMeta);
                s3.putObject(request);
                out.write(deco.print(docChild.getPath(), docChild.getActualVersion().getSize(), null));
                out.flush();
            } else {
                if (metadata) {
                    // Metadata
                    DocumentMetadata dmd = ma.getMetadata(docChild);
                    String json = gson.toJson(dmd);
                    objMeta.addUserMetadata("okm", json);

                    // Update object metadata
                    CopyObjectRequest copyObjReq = new CopyObjectRequest(bucket, path, bucket, path);
                    copyObjReq.setNewObjectMetadata(objMeta);
                    s3.copyObject(copyObjReq);
                }

                log.info("Don't need to upload document {}", docChild.getPath());
            }

            // Stats
            stats.setSize(stats.getSize() + docChild.getActualVersion().getSize());
            stats.setDocuments(stats.getDocuments() + 1);
        } finally {
            IOUtils.closeQuietly(is);
            IOUtils.closeQuietly(fos);
            FileUtils.deleteQuietly(tmpDoc);
        }
    }

    for (Iterator<Folder> it = fm.getChildren(token, fldPath).iterator(); it.hasNext();) {
        InputStream is = null;

        try {
            Folder fldChild = it.next();
            String path = fldChild.getPath().substring(1) + "/";
            is = new ByteArrayInputStream(new byte[0]);
            ObjectMetadata objMeta = new ObjectMetadata();
            objMeta.setContentLength(0);
            PutObjectRequest request = new PutObjectRequest(bucket, path, is, objMeta);

            // Metadata
            if (metadata) {
                FolderMetadata fmd = ma.getMetadata(fldChild);
                String json = gson.toJson(fmd);
                objMeta.addUserMetadata("okm", json);
            }

            request.setMetadata(objMeta);
            s3.putObject(request);

            ImpExpStats tmp = backupHelper(token, fldChild.getPath(), s3, bucket, metadata, out, deco);

            // Stats
            stats.setSize(stats.getSize() + tmp.getSize());
            stats.setDocuments(stats.getDocuments() + tmp.getDocuments());
            stats.setFolders(stats.getFolders() + tmp.getFolders() + 1);
            stats.setOk(stats.isOk() && tmp.isOk());
        } finally {
            IOUtils.closeQuietly(is);
        }
    }

    log.debug("backupHelper: {}", stats);
    return stats;
}

From source file:com.pinterest.secor.uploader.S3UploadManager.java

License:Apache License

private void enableS3Encryption(PutObjectRequest uploadRequest) {
    ObjectMetadata objectMetadata = new ObjectMetadata();
    objectMetadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
    uploadRequest.setMetadata(objectMetadata);
}

From source file:com.proofpoint.event.collector.combiner.S3CombineObjectMetadataStore.java

License:Apache License

private boolean writeMetadataFile(EventPartition eventPartition, CombinedGroup combinedGroup, String sizeName) {
    byte[] json = jsonCodec.toJson(combinedGroup).getBytes(Charsets.UTF_8);
    URI metadataFile = toMetadataLocation(eventPartition, sizeName);
    try {
        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(json.length);
        metadata.setContentMD5(Base64.encodeBase64String(DigestUtils.md5(json)));
        metadata.setContentType(MediaType.APPLICATION_JSON);
        InputStream input = new ByteArrayInputStream(json);
        s3Service.putObject(getS3Bucket(metadataFile), getS3ObjectKey(metadataFile), input, metadata);
        return true;
    } catch (AmazonClientException e) {
        log.warn(e, "error writing metadata file: %s", metadataFile);
        return false;
    }
}