Example usage for com.amazonaws.services.s3.model ObjectMetadata ObjectMetadata

List of usage examples for com.amazonaws.services.s3.model ObjectMetadata ObjectMetadata

Introduction

In this page you can find the example usage for com.amazonaws.services.s3.model ObjectMetadata ObjectMetadata.

Prototype

public ObjectMetadata() 

Source Link

Usage

From source file:org.xwiki.blobstore.s3.internal.S3BlobStore.java

License:Open Source License

@Override
public void putBlob(String path, InputStream content, long length) {
    // Store the given stream in S3 under the normalized form of the path.
    final String key = normalizePath(path);

    this.logger.debug("Putting blob to '{}'", key);

    final ObjectMetadata metadata = new ObjectMetadata();
    // Only advertise a Content-Length when the caller supplied a positive
    // one; otherwise the SDK handles the unsized stream itself.
    if (length > 0) {
        metadata.setContentLength(length);
    }

    this.client.putObject(this.bucket, key, content, metadata);
}

From source file:org.zalando.stups.fullstop.plugin.SaveSecurityGroupsPlugin.java

License:Apache License

/**
 * Uploads the given JSON content to S3 under a timestamped key.
 *
 * @param content the security-group dump to store (encoded as UTF-8)
 * @param prefix  key prefix passed through to the S3 writer
 */
private void writeToS3(String content, String prefix) {
    // Encode once and reuse the byte array for both the stream and the
    // Content-Length. The previous code used content.length() (char count),
    // which undercounts the UTF-8 byte length for any non-ASCII character.
    byte[] bytes = content.getBytes(StandardCharsets.UTF_8);
    InputStream stream = new ByteArrayInputStream(bytes);
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(bytes.length);
    String fileName = SECURITY_GROUPS + new DateTime(UTC) + JSON;
    s3Writer.putObjectToS3(bucketName, fileName, prefix, metadata, stream);
}

From source file:org.zalando.stups.fullstop.s3.S3Writer.java

License:Apache License

/**
 * Writes the decoded log payload to S3 under a key derived from account,
 * region, boot date and instance id.
 *
 * @param accountId        AWS account the instance belongs to
 * @param region           AWS region of the instance
 * @param instanceBootTime boot time used to build the date-partitioned key
 * @param logData          Base64-encoded log payload
 * @param logType          either USER_DATA or AUDIT_LOG
 * @param instanceId       instance the logs were taken from
 * @throws IOException              if the upload fails
 * @throws IllegalArgumentException if logType is not a supported value
 */
public void writeToS3(String accountId, String region, Date instanceBootTime, String logData, String logType,
        String instanceId) throws IOException {
    String fileName;

    DateTime dateTime = new DateTime(instanceBootTime, UTC);

    // Key layout: <account>/<region>/<YYYY>/<MM>/<dd>/<instance>-<bootTime>
    String keyName = Paths.get(accountId, region, dateTime.toString("YYYY"), dateTime.toString("MM"),
            dateTime.toString("dd"), instanceId + "-" + dateTime).toString();

    switch (logType) {
    case USER_DATA:
        fileName = TAUPAGE_YAML;
        break;
    case AUDIT_LOG:
        fileName = AUDIT_LOG_FILE_NAME + new DateTime(UTC) + LOG_GZ;
        break;
    default:
        // The original code only logged here and then proceeded to write an
        // object with a null file name; fail fast instead.
        logger.error("Wrong logType given: " + logType);
        throw new IllegalArgumentException("Unsupported logType: " + logType);
    }
    ObjectMetadata metadata = new ObjectMetadata();
    byte[] decodedLogData = Base64.decode(logData);
    metadata.setContentLength(decodedLogData.length);

    InputStream stream = new ByteArrayInputStream(decodedLogData);

    putObjectToS3(bucketName, fileName, keyName, metadata, stream);
}

From source file:oulib.aws.s3.S3Util.java

/**
 * Creates an AWS S3 folder/*from  w  ww.ja  v  a2 s . co m*/
 * 
 * @param bucketName
 * @param folderName
 * @param client 
 */
public static void createFolder(String bucketName, String folderName, AmazonS3 client) {

    try {

        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(0);

        InputStream emptyContent = new ByteArrayInputStream(new byte[0]);

        PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, folderName + "/", emptyContent,
                metadata);

        client.putObject(putObjectRequest);

        System.out
                .println("Sucessfully created the folder of " + folderName + " in the bucket of " + bucketName);
    } catch (Exception ex) {
        System.out.println("Failed to create the folder of " + folderName + " in the bucket of " + bucketName);
        //          Logger.getLogger(AwsS3Processor.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:oulib.aws.s3.S3Util.java

/**
 * Generates a downscaled TIFF from a large TIFF S3 object and uploads it
 * to the target bucket.<br>
 * Note: the small tiff file will have the same key path as the original one.
 *
 * @param s3client : S3 client used for the upload
 * @param s3 : S3 object that contains the source TIFF data
 * @param targetBucketName : the bucket that stores the small tiff file
 * @param targetKey : key of the object in the target bucket
 * @param compressionRate : scale factor handed to JAI's "SubsampleAverage"
 *        (values below 1.0 shrink the image)
 * @return : PutObjectResult of the upload, or null if decoding or the
 *         upload failed
 */
public static PutObjectResult generateSmallTiff(AmazonS3 s3client, S3Object s3, String targetBucketName,
        String targetKey, double compressionRate) {

    PutObjectResult result = null;
    ByteArrayOutputStream bos = null;
    ByteArrayOutputStream os = null;
    ByteArrayInputStream is = null;
    S3ObjectInputStream s = null;
    ByteArrayInputStream byteInputStream = null;

    try {
        // Property name indicates this disables JAI's native mediaLib path,
        // forcing the pure-Java codecs — presumably for portability; confirm.
        System.setProperty("com.sun.media.jai.disableMediaLib", "true");

        bos = new ByteArrayOutputStream();
        s = s3.getObjectContent();
        // The whole source object is buffered in memory before decoding.
        byte[] bytes = IOUtils.toByteArray(s);
        byteInputStream = new ByteArrayInputStream(bytes);

        TIFFDecodeParam param = new TIFFDecodeParam();
        ImageDecoder dec = ImageCodec.createImageDecoder("TIFF", byteInputStream, param);

        RenderedImage image = dec.decodeAsRenderedImage();

        RenderingHints qualityHints = new RenderingHints(RenderingHints.KEY_RENDERING,
                RenderingHints.VALUE_RENDER_QUALITY);

        // Scale both axes by compressionRate using averaged subsampling.
        RenderedOp resizedImage = JAI.create("SubsampleAverage", image, compressionRate, compressionRate,
                qualityHints);

        TIFFEncodeParam params = new com.sun.media.jai.codec.TIFFEncodeParam();

        // Encodes into bos as a side effect of rendering the "encode" op.
        resizedImage = JAI.create("encode", resizedImage, bos, "TIFF", params);

        BufferedImage imagenew = resizedImage.getSourceImage(0).getAsBufferedImage();

        // Re-encode via ImageIO into os; is snapshots those bytes for S3.
        os = new ByteArrayOutputStream();
        ImageIO.write(imagenew, "tif", os);
        is = new ByteArrayInputStream(os.toByteArray());

        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(os.toByteArray().length);
        metadata.setContentType("image/tiff");
        metadata.setLastModified(new Date());

        // Closing a ByteArrayOutputStream is a no-op; 'is' already holds a
        // copy of the bytes, so this is safe (and closed again in finally).
        os.close();

        imagenew.flush();

        result = s3client.putObject(new PutObjectRequest(targetBucketName, targetKey, is, metadata));
    } catch (IOException | AmazonClientException ex) {
        Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        // Best-effort cleanup of every stream touched above.
        try {
            if (bos != null) {
                bos.close();
            }
            if (os != null) {
                os.close();
            }
            if (is != null) {
                is.close();
            }
            if (s != null) {
                s.close();
            }
            if (byteInputStream != null) {
                byteInputStream.close();
            }
        } catch (IOException ex) {
            Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    return result;
}

From source file:oulib.aws.s3.S3Util.java

/**
 * Pulls the TIFF metadata (root/EXIF/GPS directories) out of the input S3
 * object and injects it into the content of the target S3 object;<br>
 * the merged image is written back as a new object whose key is the
 * target's key with "-copied.tif" substituted for the extension.
 *
 * @param s3client : S3 client
 * @param obj1 : input object that provides metadata
 * @param obj2 : target object that receives metadata
 *
 * @return PutObjectResult of the upload, or null on read/write failure
 */
public static PutObjectResult copyS3ObjectTiffMetadata(AmazonS3 s3client, S3Object obj1, S3Object obj2) {

    PutObjectResult result = null;

    // NOTE(review): bufferedInputStrean and bis are never assigned below;
    // the null checks in the finally block are dead code.
    BufferedInputStream bufferedInputStrean = null;
    ByteArrayOutputStream byteArrayOutputStream = null;
    ByteArrayInputStream byteArrayInputStream = null;
    ByteArrayInputStream bis = null;
    S3ObjectInputStream content1 = null;
    S3ObjectInputStream content2 = null;
    String targetBucketName = obj2.getBucketName();
    // NOTE(review): String.split treats ".tif" as a regex ('.' matches any
    // character) — fine for typical keys but fragile for unusual ones.
    String outputKey = obj2.getKey().split(".tif")[0] + "-copied.tif";

    ImageMetadata metadata1, metadata2;
    TiffImageMetadata tiffMetadata1, tiffMetadata2;
    TiffOutputSet output1, output2;

    try {
        content1 = obj1.getObjectContent();
        content2 = obj2.getObjectContent();

        // Both objects are buffered fully in memory before parsing.
        byte[] bytes1 = IOUtils.toByteArray(content1);
        byte[] bytes2 = IOUtils.toByteArray(content2);

        metadata1 = Imaging.getMetadata(bytes1);
        metadata2 = Imaging.getMetadata(bytes2);

        tiffMetadata1 = (TiffImageMetadata) metadata1;
        tiffMetadata2 = (TiffImageMetadata) metadata2;

        output1 = tiffMetadata1.getOutputSet();
        output2 = tiffMetadata2.getOutputSet();

        // Target directories that will receive fields copied from output1.
        TiffOutputDirectory rootDir = output2.getOrCreateRootDirectory();
        TiffOutputDirectory exifDir = output2.getOrCreateExifDirectory();
        TiffOutputDirectory gpsDir = output2.getOrCreateGPSDirectory();

        // Root directory: copy only fields the target does not already have.
        if (null != output1.getRootDirectory()) {
            List<TiffOutputField> fs = output1.getRootDirectory().getFields();
            for (TiffOutputField f1 : fs) {
                if (null == rootDir.findField(f1.tag)
                        // CANNOT create the output image with this tag included!
                        && !"PlanarConfiguration".equals(f1.tagInfo.name)) {
                    rootDir.add(f1);
                }
            }
        }

        // EXIF directory: source fields overwrite existing target fields.
        if (null != output1.getExifDirectory()) {
            for (TiffOutputField f2 : output1.getExifDirectory().getFields()) {
                exifDir.removeField(f2.tagInfo);
                exifDir.add(f2);
            }
        }

        // GPS directory: same overwrite semantics as EXIF above.
        if (null != output1.getGPSDirectory()) {
            for (TiffOutputField f3 : output1.getGPSDirectory().getFields()) {
                gpsDir.removeField(f3.tagInfo);
                gpsDir.add(f3);
            }
        }

        // Serialize the merged output set and upload it as a new object.
        byteArrayOutputStream = new ByteArrayOutputStream();
        TiffImageWriterLossy writerLossy = new TiffImageWriterLossy(output2.byteOrder);
        writerLossy.write(byteArrayOutputStream, output2);

        byteArrayInputStream = new ByteArrayInputStream(byteArrayOutputStream.toByteArray());

        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(byteArrayOutputStream.toByteArray().length);
        metadata.setContentType("image/tiff");
        metadata.setLastModified(new Date());

        result = s3client
                .putObject(new PutObjectRequest(targetBucketName, outputKey, byteArrayInputStream, metadata));

    } catch (ImageReadException | IOException | ImageWriteException ex) {
        Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        // Best-effort cleanup of every stream touched above.
        try {
            if (null != content1) {
                content1.close();
            }
            if (null != content2) {
                content2.close();
            }
            if (null != bufferedInputStrean) {
                bufferedInputStrean.close();
            }
            if (null != byteArrayInputStream) {
                byteArrayInputStream.close();
            }
            if (null != byteArrayOutputStream) {
                byteArrayOutputStream.close();
            }
            if (null != bis) {
                bis.close();
            }
        } catch (IOException ex) {
            Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    return result;
}

From source file:pl.pawlik.cymes.controllers.FormController.java

/**
 * Accepts an uploaded file and stores it in S3 under a random key, then
 * redirects back to the upload page.
 *
 * @param file the multipart upload from the "plik" form field
 * @return redirect view name
 */
@RequestMapping(value = "/upload", method = RequestMethod.POST)
public String handleFileUpload(@RequestParam("plik") MultipartFile file) {
    if (!file.isEmpty()) {
        try {
            UUID uuid = UUID.randomUUID();
            String filename = "/uploads/upload_" + uuid;
            String bucketName = "pawliktest";
            // SECURITY: hard-coded placeholder credentials — move these to
            // configuration (or a credentials provider chain) before any
            // real use; never commit real keys.
            String accessKey = "xx";
            String secretKey = "xx";
            byte[] bytes = file.getBytes();
            InputStream inputStream = new ByteArrayInputStream(bytes);
            // Supply the Content-Length so the SDK does not have to buffer
            // the stream again to size the request.
            ObjectMetadata metadata = new ObjectMetadata();
            metadata.setContentLength(bytes.length);
            AmazonS3 s3client = new AmazonS3Client(new BasicAWSCredentials(accessKey, secretKey));
            s3client.putObject(new PutObjectRequest(bucketName, filename, inputStream, metadata));

            // The original message left an unfilled "{}" placeholder and a
            // dangling "as "; report the actual key instead.
            System.out.println("File " + filename + " has been successfully uploaded");
        } catch (Exception e) {
            e.printStackTrace();
        }
    } else {
        System.out.println("Uploaded file is empty");
    }
    return "redirect:/cymes/upload";
}

From source file:prodoc.StoreAmazonS3.java

License:GNU General Public License

/**
 * Uploads the content stream for the given document id and version.
 *
 * @param Id    document identifier
 * @param Ver   document version
 * @param Bytes content stream to store
 * @return always -1
 * @throws PDException declared for upload failures (reported via
 *         {@code PDException.GenPDException})
 */
protected int Insert(String Id, String Ver, InputStream Bytes) throws PDException {
    final String key = GenKey(Id, Ver);
    try {
        s3.putObject(BucketName, key, Bytes, new ObjectMetadata());
    } catch (Exception ex) {
        PDException.GenPDException("Error_inserting_content", ex.getLocalizedMessage());
    }
    return -1;
}

From source file:raymond.mockftpserver.S3BucketFileSystem.java

License:Apache License

@Override
public void add(FileSystemEntry entry) {
    // Directories become zero-byte objects whose key ends in FOLDER_SUFFIX;
    // files are streamed straight from the entry.
    final ObjectMetadata meta = new ObjectMetadata();
    final PutObjectRequest req;
    if (isDirectory(entry)) {
        meta.setContentLength(0);
        req = new PutObjectRequest(bucket, entry.getPath() + FOLDER_SUFFIX,
                new ByteArrayInputStream(new byte[0]), meta);
    } else {
        meta.setContentLength(entry.getSize());
        req = new PutObjectRequest(bucket, entry.getPath(), ((FileEntry) entry).createInputStream(),
                meta);
    }
    // Every object is stored with the reduced-redundancy storage class.
    req.setStorageClass(StorageClass.ReducedRedundancy);
    s3.putObject(req);
}

From source file:sample.S3EmitterWithMetadata.java

License:Open Source License

/**
 * Concatenates the buffered records into one payload and uploads it to S3
 * with server-side encryption and a 14-day expiration.
 *
 * @param buffer the records to emit
 * @return an empty list on success, or the full record list so the caller
 *         can retry when anything fails
 * @throws IOException declared by the emitter interface
 */
@Override
public List<byte[]> emit(final UnmodifiableBuffer<byte[]> buffer) throws IOException {
    List<byte[]> records = buffer.getRecords();
    // Write all of the records to a single in-memory output stream.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    for (byte[] record : records) {
        try {
            baos.write(record);
        } catch (Exception e) {
            LOG.error("Error writing record to output stream. Failing this emit attempt. Record: "
                    + Arrays.toString(record), e);
            // Returning the whole buffer tells the caller to retry all records.
            return buffer.getRecords();
        }
    }
    // Amazon S3 key covering the sequence-number range of this buffer.
    String s3FileName = getS3FileName(buffer.getFirstSequenceNumber(), buffer.getLastSequenceNumber());
    String s3URI = getS3URI(s3FileName);
    try {
        ByteArrayInputStream object = new ByteArrayInputStream(baos.toByteArray());
        LOG.debug("Starting upload of file " + s3URI + " to Amazon S3 containing " + records.size()
                + " records.");
        ObjectMetadata meta = new ObjectMetadata();
        // Expire the object 14 days from now. A freshly constructed calendar
        // already holds the current time, so the previous Date/setTime pair
        // was redundant.
        GregorianCalendar calendar = new GregorianCalendar();
        calendar.add(Calendar.DATE, 14);
        meta.setExpirationTime(calendar.getTime());
        meta.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
        meta.setContentLength(baos.size());
        s3client.putObject(s3Bucket, s3FileName, object, meta);
        LOG.info("Successfully emitted " + buffer.getRecords().size() + " records to Amazon S3 in " + s3URI);
        return Collections.emptyList();
    } catch (Exception e) {
        // The original message ran the URI and "to" together ("...gzto Amazon S3").
        LOG.error("Caught exception when uploading file " + s3URI + " to Amazon S3. Failing this emit attempt.",
                e);
        return buffer.getRecords();
    }
}