List of usage examples for javax.xml.stream XMLStreamWriter writeEndElement
public void writeEndElement() throws XMLStreamException;
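writeEndElement() writes the end tag for the element most recently opened with writeStartElement(), relying on the writer's internal state for the element name, so every start call must eventually be matched by an end call. A minimal, self-contained sketch (class, element names, and text are illustrative):

import java.io.StringWriter;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;

public class WriteEndElementExample {
    public static void main(String[] args) throws XMLStreamException {
        StringWriter out = new StringWriter();
        XMLStreamWriter xml =
                XMLOutputFactory.newInstance().createXMLStreamWriter(out);

        xml.writeStartDocument();
        xml.writeStartElement("book");           // <book>
        xml.writeStartElement("title");          //   <title>
        xml.writeCharacters("Staying with XML");
        xml.writeEndElement();                   //   </title> -- closes "title"
        xml.writeEndElement();                   // </book>  -- closes "book"
        xml.writeEndDocument();
        xml.close();

        // e.g. <?xml version="1.0" ?><book><title>Staying with XML</title></book>
        System.out.println(out);
    }
}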
From source file:org.gaul.s3proxy.S3ProxyHandler.java
private void handleInitiateMultipartUpload(HttpServletRequest request,
        HttpServletResponse response, BlobStore blobStore,
        String containerName, String blobName)
        throws IOException, S3Exception {
    ByteSource payload = ByteSource.empty();
    BlobBuilder.PayloadBlobBuilder builder = blobStore
            .blobBuilder(blobName)
            .payload(payload);
    addContentMetdataFromHttpRequest(builder, request);
    builder.contentLength(payload.size());

    BlobAccess access;
    String cannedAcl = request.getHeader("x-amz-acl");
    if (cannedAcl == null || cannedAcl.equalsIgnoreCase("private")) {
        access = BlobAccess.PRIVATE;
    } else if (cannedAcl.equalsIgnoreCase("public-read")) {
        access = BlobAccess.PUBLIC_READ;
    } else if (CANNED_ACLS.contains(cannedAcl)) {
        throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
    } else {
        response.sendError(HttpServletResponse.SC_BAD_REQUEST);
        return;
    }
    PutOptions options = new PutOptions().setBlobAccess(access);

    MultipartUpload mpu = blobStore.initiateMultipartUpload(containerName,
            builder.build().getMetadata(), options);

    if (Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType(blobStore))) {
        blobStore.putBlob(containerName, builder.name(mpu.id()).build(),
                options);
    }

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("InitiateMultipartUploadResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        writeSimpleElement(xml, "Bucket", containerName);
        writeSimpleElement(xml, "Key", blobName);
        writeSimpleElement(xml, "UploadId", mpu.id());

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}
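The S3ProxyHandler examples in this listing all call a private writeSimpleElement helper that is not part of the excerpt. Judging from how it is used, it presumably just wraps a text value in a matched start/end tag pair; a hypothetical sketch of such a helper:

// Hypothetical reconstruction of the helper assumed by these examples:
// it emits <elementName>characters</elementName> on the given writer.
private static void writeSimpleElement(XMLStreamWriter xml,
        String elementName, String characters) throws XMLStreamException {
    xml.writeStartElement(elementName);
    xml.writeCharacters(characters);
    xml.writeEndElement();   // close the element just opened
}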
From source file:org.gaul.s3proxy.S3ProxyHandler.java
private void handleCompleteMultipartUpload(HttpServletResponse response,
        InputStream is, BlobStore blobStore, String containerName,
        String blobName, String uploadId) throws IOException, S3Exception {
    MultipartUpload mpu;
    if (Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType(blobStore))) {
        Blob stubBlob = blobStore.getBlob(containerName, uploadId);
        BlobAccess access = blobStore.getBlobAccess(containerName, uploadId);
        mpu = MultipartUpload.create(containerName, blobName, uploadId,
                stubBlob.getMetadata(), new PutOptions().setBlobAccess(access));
    } else {
        mpu = MultipartUpload.create(containerName, blobName, uploadId,
                new MutableBlobMetadataImpl(), new PutOptions());
    }

    // List parts to get part sizes and to map multiple Azure parts
    // into single parts.
    ImmutableMap.Builder<Integer, MultipartPart> builder =
            ImmutableMap.builder();
    for (MultipartPart part : blobStore.listMultipartUpload(mpu)) {
        builder.put(part.partNumber(), part);
    }
    ImmutableMap<Integer, MultipartPart> partsByListing = builder.build();

    List<MultipartPart> parts = new ArrayList<>();
    String blobStoreType = getBlobStoreType(blobStore);
    if (blobStoreType.equals("azureblob")) {
        // TODO: how to sanity check parts?
        for (MultipartPart part : blobStore.listMultipartUpload(mpu)) {
            parts.add(part);
        }
    } else {
        CompleteMultipartUploadRequest cmu = new XmlMapper().readValue(is,
                CompleteMultipartUploadRequest.class);
        // use TreeMap to allow runt last part
        SortedMap<Integer, String> requestParts = new TreeMap<>();
        if (cmu.parts != null) {
            for (CompleteMultipartUploadRequest.Part part : cmu.parts) {
                requestParts.put(part.partNumber, part.eTag);
            }
        }
        for (Iterator<Map.Entry<Integer, String>> it =
                requestParts.entrySet().iterator(); it.hasNext();) {
            Map.Entry<Integer, String> entry = it.next();
            MultipartPart part = partsByListing.get(entry.getKey());
            if (part == null) {
                throw new S3Exception(S3ErrorCode.INVALID_PART);
            }
            long partSize = part.partSize();
            if (partSize < blobStore.getMinimumMultipartPartSize() &&
                    partSize != -1 && it.hasNext()) {
                throw new S3Exception(S3ErrorCode.ENTITY_TOO_SMALL);
            }
            if (part.partETag() != null && !equalsIgnoringSurroundingQuotes(
                    part.partETag(), entry.getValue())) {
                throw new S3Exception(S3ErrorCode.INVALID_PART);
            }
            parts.add(MultipartPart.create(entry.getKey(), partSize,
                    part.partETag(), part.lastModified()));
        }
    }

    if (parts.isEmpty()) {
        // Amazon requires at least one part
        throw new S3Exception(S3ErrorCode.MALFORMED_X_M_L);
    }

    String eTag = blobStore.completeMultipartUpload(mpu, parts);

    if (Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType(blobStore))) {
        blobStore.removeBlob(containerName, uploadId);
    }

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("CompleteMultipartUploadResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        // TODO: bogus value
        writeSimpleElement(xml, "Location",
                "http://Example-Bucket.s3.amazonaws.com/" + blobName);

        writeSimpleElement(xml, "Bucket", containerName);
        writeSimpleElement(xml, "Key", blobName);

        if (eTag != null) {
            writeSimpleElement(xml, "ETag", maybeQuoteETag(eTag));
        }

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}
From source file:org.gaul.s3proxy.S3ProxyHandler.java
private void handleListParts(HttpServletRequest request,
        HttpServletResponse response, BlobStore blobStore,
        String containerName, String blobName, String uploadId)
        throws IOException, S3Exception {
    // support only the no-op zero case
    String partNumberMarker = request.getParameter("part-number-marker");
    if (partNumberMarker != null && !partNumberMarker.equals("0")) {
        throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
    }

    // TODO: how to reconstruct original mpu?
    MultipartUpload mpu = MultipartUpload.create(containerName, blobName,
            uploadId, createFakeBlobMetadata(blobStore), new PutOptions());

    List<MultipartPart> parts;
    if (getBlobStoreType(blobStore).equals("azureblob")) {
        // map Azure subparts back into S3 parts
        SortedMap<Integer, Long> map = new TreeMap<>();
        for (MultipartPart part : blobStore.listMultipartUpload(mpu)) {
            int virtualPartNumber = part.partNumber() / 10_000;
            Long size = map.get(virtualPartNumber);
            map.put(virtualPartNumber,
                    (size == null ? 0L : (long) size) + part.partSize());
        }
        parts = new ArrayList<>();
        for (Map.Entry<Integer, Long> entry : map.entrySet()) {
            String eTag = "";  // TODO: bogus value
            Date lastModified = null;  // TODO: bogus value
            parts.add(MultipartPart.create(entry.getKey(),
                    entry.getValue(), eTag, lastModified));
        }
    } else {
        parts = blobStore.listMultipartUpload(mpu);
    }

    String encodingType = request.getParameter("encoding-type");

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("ListPartsResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        if (encodingType != null && encodingType.equals("url")) {
            writeSimpleElement(xml, "EncodingType", encodingType);
        }

        writeSimpleElement(xml, "Bucket", containerName);
        writeSimpleElement(xml, "Key", encodeBlob(encodingType, blobName));
        writeSimpleElement(xml, "UploadId", uploadId);
        writeInitiatorStanza(xml);
        writeOwnerStanza(xml);
        writeSimpleElement(xml, "StorageClass", "STANDARD");

        // TODO: pagination
/*
        writeSimpleElement(xml, "PartNumberMarker", "1");
        writeSimpleElement(xml, "NextPartNumberMarker", "3");
        writeSimpleElement(xml, "MaxParts", "2");
        writeSimpleElement(xml, "IsTruncated", "true");
*/

        for (MultipartPart part : parts) {
            xml.writeStartElement("Part");

            writeSimpleElement(xml, "PartNumber",
                    String.valueOf(part.partNumber()));

            Date lastModified = part.lastModified();
            if (lastModified != null) {
                writeSimpleElement(xml, "LastModified",
                        formatDate(lastModified));
            }

            String eTag = part.partETag();
            if (eTag != null) {
                writeSimpleElement(xml, "ETag", maybeQuoteETag(eTag));
            }

            writeSimpleElement(xml, "Size",
                    String.valueOf(part.partSize()));

            xml.writeEndElement();
        }

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}
From source file:org.gaul.s3proxy.S3ProxyHandler.java
private void handleCopyPart(HttpServletRequest request,
        HttpServletResponse response, BlobStore blobStore,
        String containerName, String blobName, String uploadId)
        throws IOException, S3Exception {
    // TODO: duplicated from handlePutBlob
    String copySourceHeader = request.getHeader("x-amz-copy-source");
    copySourceHeader = URLDecoder.decode(copySourceHeader, "UTF-8");
    if (copySourceHeader.startsWith("/")) {
        // Some clients like boto do not include the leading slash
        copySourceHeader = copySourceHeader.substring(1);
    }
    String[] path = copySourceHeader.split("/", 2);
    if (path.length != 2) {
        throw new S3Exception(S3ErrorCode.INVALID_REQUEST);
    }
    String sourceContainerName = path[0];
    String sourceBlobName = path[1];

    GetOptions options = new GetOptions();
    String range = request.getHeader("x-amz-copy-source-range");
    if (range != null && range.startsWith("bytes=") &&
            // ignore multiple ranges
            range.indexOf(',') == -1) {
        range = range.substring("bytes=".length());
        String[] ranges = range.split("-", 2);
        if (ranges[0].isEmpty()) {
            options.tail(Long.parseLong(ranges[1]));
        } else if (ranges[1].isEmpty()) {
            options.startAt(Long.parseLong(ranges[0]));
        } else {
            options.range(Long.parseLong(ranges[0]),
                    Long.parseLong(ranges[1]));
        }
    }

    String partNumberString = request.getParameter("partNumber");
    if (partNumberString == null) {
        throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT);
    }
    int partNumber;
    try {
        partNumber = Integer.parseInt(partNumberString);
    } catch (NumberFormatException nfe) {
        throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT,
                "Part number must be an integer between 1 and 10000" +
                ", inclusive", nfe,
                ImmutableMap.of("ArgumentName", "partNumber",
                        "ArgumentValue", partNumberString));
    }
    if (partNumber < 1 || partNumber > 10_000) {
        throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT,
                "Part number must be an integer between 1 and 10000" +
                ", inclusive", (Throwable) null,
                ImmutableMap.of("ArgumentName", "partNumber",
                        "ArgumentValue", partNumberString));
    }

    // TODO: how to reconstruct original mpu?
    MultipartUpload mpu = MultipartUpload.create(containerName, blobName,
            uploadId, createFakeBlobMetadata(blobStore), new PutOptions());

    Blob blob = blobStore.getBlob(sourceContainerName, sourceBlobName,
            options);
    if (blob == null) {
        throw new S3Exception(S3ErrorCode.NO_SUCH_KEY);
    }

    BlobMetadata blobMetadata = blob.getMetadata();

    String ifMatch = request.getHeader("x-amz-copy-source-if-match");
    String ifNoneMatch = request.getHeader("x-amz-copy-source-if-none-match");
    long ifModifiedSince = request.getDateHeader(
            "x-amz-copy-source-if-modified-since");
    long ifUnmodifiedSince = request.getDateHeader(
            "x-amz-copy-source-if-unmodified-since");
    String eTag = blobMetadata.getETag();
    if (eTag != null) {
        eTag = maybeQuoteETag(eTag);
        if (ifMatch != null && !ifMatch.equals(eTag)) {
            throw new S3Exception(S3ErrorCode.PRECONDITION_FAILED);
        }
        if (ifNoneMatch != null && ifNoneMatch.equals(eTag)) {
            throw new S3Exception(S3ErrorCode.PRECONDITION_FAILED);
        }
    }

    Date lastModified = blobMetadata.getLastModified();
    if (lastModified != null) {
        if (ifModifiedSince != -1 &&
                lastModified.compareTo(new Date(ifModifiedSince)) <= 0) {
            throw new S3Exception(S3ErrorCode.PRECONDITION_FAILED);
        }
        if (ifUnmodifiedSince != -1 &&
                lastModified.compareTo(new Date(ifUnmodifiedSince)) >= 0) {
            throw new S3Exception(S3ErrorCode.PRECONDITION_FAILED);
        }
    }

    long contentLength =
            blobMetadata.getContentMetadata().getContentLength();
    String blobStoreType = getBlobStoreType(blobStore);

    try (InputStream is = blob.getPayload().openStream()) {
        if (blobStoreType.equals("azureblob")) {
            // Azure has a maximum part size of 4 MB while S3 has a minimum
            // part size of 5 MB and a maximum of 5 GB.  Split a single S3
            // part into multiple Azure parts.
            long azureMaximumMultipartPartSize =
                    blobStore.getMaximumMultipartPartSize();
            HashingInputStream his = new HashingInputStream(Hashing.md5(), is);
            for (int offset = 0, subPartNumber = 0; offset < contentLength;
                    offset += azureMaximumMultipartPartSize, ++subPartNumber) {
                Payload payload = Payloads.newInputStreamPayload(
                        new UncloseableInputStream(ByteStreams.limit(his,
                                azureMaximumMultipartPartSize)));
                payload.getContentMetadata().setContentLength(
                        Math.min(azureMaximumMultipartPartSize,
                                contentLength - offset));
                blobStore.uploadMultipartPart(mpu,
                        10_000 * partNumber + subPartNumber, payload);
            }
            eTag = BaseEncoding.base16().lowerCase().encode(
                    his.hash().asBytes());
        } else {
            Payload payload = Payloads.newInputStreamPayload(is);
            payload.getContentMetadata().setContentLength(contentLength);

            MultipartPart part = blobStore.uploadMultipartPart(mpu,
                    partNumber, payload);
            eTag = part.partETag();
        }
    }

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("CopyObjectResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        writeSimpleElement(xml, "LastModified", formatDate(lastModified));
        if (eTag != null) {
            writeSimpleElement(xml, "ETag", maybeQuoteETag(eTag));
        }

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}
From source file:org.gephi.statistics.StatisticsModelImpl.java
public void writeXML(XMLStreamWriter writer) throws XMLStreamException {
    writer.writeStartElement("statisticsmodel");

    writer.writeStartElement("reports");
    for (Map.Entry<Class, String> entry : reportMap.entrySet()) {
        if (entry.getValue() != null && !entry.getValue().isEmpty()) {
            writer.writeStartElement("report");
            String report = entry.getValue();
            report = embedImages(report);
            writer.writeAttribute("class", entry.getKey().getName());
            writer.writeAttribute("value", report);
            writer.writeEndElement();
        }
    }
    writer.writeEndElement();

    writer.writeEndElement();
}
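Because writeEndElement() always closes the most recently opened element, the nesting here is purely positional: the call inside the loop closes each report element, the next call closes reports, and the final call closes statisticsmodel. With a single non-empty report entry, the serialized output has roughly this shape (the class name and value are illustrative, and whether an empty element is collapsed to a self-closing tag depends on the writer implementation):

<statisticsmodel>
  <reports>
    <report class="org.gephi.statistics.plugin.Degree" value="..."></report>
  </reports>
</statisticsmodel>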
From source file:org.gluewine.trace.XMLTracer.java
@Override
public void beforeInvocation(Object o, Method m, Object[] params) throws Throwable {
    if (isSuppressed())
        return;

    XMLStreamWriter writer = getWriter();
    if (writer != null) {
        try {
            writer.writeStartElement("method");
            writer.writeAttribute("class", getClassName(o.getClass()));
            writer.writeAttribute("name", m.getName());
            writer.writeAttribute("start", format.format(new Date()));

            for (Object p : params) {
                if (p != null) {
                    writer.writeStartElement("parameter");
                    writer.writeAttribute("class", getClassName(p.getClass()));
                    writer.writeCharacters(p.toString());
                    writer.writeEndElement();
                } else
                    writer.writeEmptyElement("parameter");
            }
        } catch (Throwable e) {
            ErrorLogger.log(getClass(), e);
        }
    } else
        System.out.println("No Writer");

    clearSuppression();
}
From source file:org.gluewine.trace.XMLTracer.java
@Override
public void afterSuccess(Object o, Method m, Object[] params, Object result) {
    if (isSuppressed())
        return;

    XMLStreamWriter writer = getWriter();
    if (writer != null) {
        try {
            if (!m.getReturnType().equals(Void.TYPE)) {
                writer.writeStartElement("result");
                if (result != null)
                    writer.writeCharacters(result.toString());
                else
                    writer.writeCharacters("null");
                writer.writeEndElement();
            }
        } catch (Throwable e) {
            ErrorLogger.log(getClass(), e);
        }
    }

    clearSuppression();
}
From source file:org.gluewine.trace.XMLTracer.java
@Override
public void afterFailure(Object o, Method m, Object[] params, Throwable e) {
    if (isSuppressed())
        return;

    XMLStreamWriter writer = getWriter();
    if (writer != null) {
        try {
            StringWriter sw = new StringWriter();
            e.printStackTrace(new PrintWriter(sw));

            writer.writeStartElement("exception");
            writer.writeCharacters(sw.toString());
            writer.writeEndElement();
        } catch (Throwable t) {
            ErrorLogger.log(getClass(), t);
        }
    }

    clearSuppression();
}
From source file:org.gluewine.trace.XMLTracer.java
@Override
public void after(Object o, Method m, Object[] params) {
    if (isSuppressed())
        return;

    XMLStreamWriter writer = getWriter();
    if (writer != null) {
        try {
            // Close the "method" element opened in beforeInvocation().
            writer.writeEndElement();
            writer.flush();
        } catch (Throwable t) {
            ErrorLogger.log(getClass(), t);
        }
    }

    clearSuppression();
}
From source file:org.gluewine.trace.XMLTracer.java
@Override
public void close() {
    isSuppressed();

    for (XMLStreamWriter writer : writers.values()) {
        try {
            writer.writeEndElement();  // Close the root.
            writer.writeEndDocument();
            writer.flush();
            writer.close();
        } catch (XMLStreamException e) {
            ErrorLogger.log(getClass(), e);
        }
    }
    writers.clear();
}
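Two points are worth noting about this shutdown path. XMLStreamWriter.close() frees the writer but, per its contract, must not close the underlying output stream, and writeEndDocument() is documented to close any start tags that are still open. The explicit writeEndElement() for the root element therefore mainly documents intent; matching every writeStartElement() with its own writeEndElement(), as this tracer does across beforeInvocation() and after(), is the portable way to keep the output well-formed.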