List of usage examples for javax.xml.stream.XMLStreamWriter.flush()
public void flush() throws XMLStreamException;
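Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) showing where flush() typically fits: it pushes any buffered events to the underlying stream or writer without closing the XMLStreamWriter. The StringWriter target and element names are illustrative only.

import java.io.StringWriter;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;

public class FlushSketch {
    public static void main(String[] args) throws XMLStreamException {
        StringWriter out = new StringWriter();
        XMLStreamWriter writer = XMLOutputFactory.newInstance().createXMLStreamWriter(out);
        try {
            writer.writeStartDocument("UTF-8", "1.0");
            writer.writeStartElement("greeting");
            writer.writeCharacters("hello");
            writer.writeEndElement();
            writer.writeEndDocument();
        } finally {
            // flush() forces buffered output to the underlying Writer;
            // close() releases the XMLStreamWriter but does not close the target.
            writer.flush();
            writer.close();
        }
        System.out.println(out);
    }
}

The same pattern appears throughout the examples that follow: write the document, then flush before the underlying stream is read or closed.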
From source file:org.exoplatform.services.jcr.ext.artifact.ArtifactManagingServiceImpl.java
protected File createMultiMetadata(String groupId, String artifactId, String current_version,
        List<String> v_list) throws FileNotFoundException {
    File temp = null;
    try {
        String filename = getUniqueFilename("maven-metadata.xml");
        temp = File.createTempFile(filename, null);
        OutputStream os = new FileOutputStream(temp);

        XMLOutputFactory factory = XMLOutputFactory.newInstance();
        XMLStreamWriter writer = factory.createXMLStreamWriter(os);
        try {
            writer.writeStartDocument("UTF-8", "1.0");
            writer.writeStartElement("metadata");

            writer.writeStartElement("groupId");
            writer.writeCharacters(groupId);
            writer.writeEndElement();

            writer.writeStartElement("artifactId");
            writer.writeCharacters(artifactId);
            writer.writeEndElement();

            String elderVersion;
            if (v_list.size() > 0) {
                Collections.sort(v_list); // sort list
                elderVersion = v_list.get(0); // get first element
            } else {
                elderVersion = current_version;
            }
            v_list.add(current_version);

            writer.writeStartElement("version");
            writer.writeCharacters(elderVersion);
            writer.writeEndElement();

            writer.writeStartElement("versions");
            writer.writeStartElement("versioning");
            for (Iterator<String> iterator = v_list.iterator(); iterator.hasNext();) {
                writer.writeStartElement("version");
                writer.writeCharacters(iterator.next());
                writer.writeEndElement();
            }
            writer.writeEndElement();
            writer.writeEndElement();

            writer.writeEndElement();
            writer.writeEndDocument();
        } finally {
            // flush buffered output before closing the writer and the file stream
            writer.flush();
            writer.close();
            os.close();
        }
    } catch (XMLStreamException e) {
        LOG.error("Error on creating metadata - XML", e);
    } catch (IOException e) {
        LOG.error("Error on creating metadata - FILE", e);
    }
    return (temp != null && temp.exists()) ? temp : null;
}
From source file:org.flowable.bpmn.converter.BpmnXMLConverter.java
public byte[] convertToXML(BpmnModel model, String encoding) {
    try {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();

        XMLOutputFactory xof = XMLOutputFactory.newInstance();
        OutputStreamWriter out = new OutputStreamWriter(outputStream, encoding);

        XMLStreamWriter writer = xof.createXMLStreamWriter(out);
        XMLStreamWriter xtw = new IndentingXMLStreamWriter(writer);

        DefinitionsRootExport.writeRootElement(model, xtw, encoding);
        CollaborationExport.writePools(model, xtw);
        DataStoreExport.writeDataStores(model, xtw);
        SignalAndMessageDefinitionExport.writeSignalsAndMessages(model, xtw);

        for (Process process : model.getProcesses()) {
            if (process.getFlowElements().isEmpty() && process.getLanes().isEmpty()) {
                // empty process, ignore it
                continue;
            }

            ProcessExport.writeProcess(process, xtw);

            for (FlowElement flowElement : process.getFlowElements()) {
                createXML(flowElement, model, xtw);
            }

            for (Artifact artifact : process.getArtifacts()) {
                createXML(artifact, model, xtw);
            }

            // end process element
            xtw.writeEndElement();
        }

        BPMNDIExport.writeBPMNDI(model, xtw);

        // end definitions root element
        xtw.writeEndElement();
        xtw.writeEndDocument();

        xtw.flush();
        outputStream.close();
        xtw.close();

        return outputStream.toByteArray();

    } catch (Exception e) {
        LOGGER.error("Error writing BPMN XML", e);
        throw new XMLException("Error writing BPMN XML", e);
    }
}
From source file:org.flowable.cmmn.converter.CmmnXmlConverter.java
public byte[] convertToXML(CmmnModel model, String encoding) {
    try {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();

        XMLOutputFactory xof = XMLOutputFactory.newInstance();
        OutputStreamWriter out = new OutputStreamWriter(outputStream, encoding);

        XMLStreamWriter writer = xof.createXMLStreamWriter(out);
        XMLStreamWriter xtw = new IndentingXMLStreamWriter(writer);

        DefinitionsRootExport.writeRootElement(model, xtw, encoding);

        for (Case caseModel : model.getCases()) {
            if (caseModel.getPlanModel().getPlanItems().isEmpty()) {
                // empty case, ignore it
                continue;
            }

            CaseExport.writeCase(caseModel, xtw);

            Stage planModel = caseModel.getPlanModel();
            StageExport.writeStage(planModel, xtw);

            // end case element
            xtw.writeEndElement();
        }

        CmmnDIExport.writeCmmnDI(model, xtw);

        // end definitions root element
        xtw.writeEndElement();
        xtw.writeEndDocument();

        xtw.flush();
        outputStream.close();
        xtw.close();

        return outputStream.toByteArray();

    } catch (Exception e) {
        LOGGER.error("Error writing CMMN XML", e);
        throw new XMLException("Error writing CMMN XML", e);
    }
}
From source file:org.gatein.management.rest.FailureResponse.java
private void writeXml(OutputStream out, boolean pretty) throws IOException {
    XMLStreamWriter writer;
    try {
        writer = XMLOutputFactory.newInstance().createXMLStreamWriter(out);
    } catch (XMLStreamException e) {
        throw new IOException("Could not create XML streaming writer.", e);
    }

    try {
        writer.writeStartDocument("UTF-8", "1.0");

        // root element <failureResult>
        nl(writer, pretty);
        writer.writeStartElement("failureResult");
        nl(writer, pretty);
        indent(writer, 1, pretty);

        // <failure>
        writer.writeStartElement("failure");
        writer.writeCharacters(outcome.getFailureDescription());
        writer.writeEndElement();
        nl(writer, pretty);
        indent(writer, 1, pretty);

        // <operationName>
        writer.writeStartElement("operationName");
        writer.writeCharacters(operationName);
        writer.writeEndElement();
        nl(writer, pretty);

        // </failureResult>
        writer.writeEndElement();

        // End document
        writer.writeCharacters("\n");
        writer.writeEndDocument();
        writer.flush();
    } catch (XMLStreamException e) {
        throw new IOException("Exception writing failure response to XML. Failure response message was '"
                + outcome.getFailureDescription() + "'", e);
    }
    // finally
    // {
    //     try
    //     {
    //         writer.close();
    //     }
    //     catch (XMLStreamException e)
    //     {
    //         // ignore
    //     }
    // }
}
From source file:org.gaul.s3proxy.S3ProxyHandler.java
private void handleBlobList(HttpServletRequest request, HttpServletResponse response, BlobStore blobStore,
        String containerName) throws IOException, S3Exception {
    String blobStoreType = getBlobStoreType(blobStore);
    ListContainerOptions options = new ListContainerOptions();
    String encodingType = request.getParameter("encoding-type");
    String delimiter = request.getParameter("delimiter");
    if (delimiter != null) {
        options.delimiter(delimiter);
    } else {
        options.recursive();
    }
    String prefix = request.getParameter("prefix");
    if (prefix != null && !prefix.isEmpty()) {
        options.prefix(prefix);
    }
    String marker = request.getParameter("marker");
    if (marker != null) {
        if (Quirks.OPAQUE_MARKERS.contains(blobStoreType)) {
            String realMarker = lastKeyToMarker.getIfPresent(Maps.immutableEntry(containerName, marker));
            if (realMarker != null) {
                marker = realMarker;
            }
        }
        options.afterMarker(marker);
    }

    int maxKeys = 1000;
    String maxKeysString = request.getParameter("max-keys");
    if (maxKeysString != null) {
        try {
            maxKeys = Integer.parseInt(maxKeysString);
        } catch (NumberFormatException nfe) {
            throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, nfe);
        }
        if (maxKeys > 1000) {
            maxKeys = 1000;
        }
    }
    options.maxResults(maxKeys);

    response.setCharacterEncoding("UTF-8");

    PageSet<? extends StorageMetadata> set = blobStore.list(containerName, options);

    try (Writer writer = response.getWriter()) {
        response.setStatus(HttpServletResponse.SC_OK);
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("ListBucketResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        writeSimpleElement(xml, "Name", containerName);

        if (prefix == null) {
            xml.writeEmptyElement("Prefix");
        } else {
            writeSimpleElement(xml, "Prefix", encodeBlob(encodingType, prefix));
        }

        writeSimpleElement(xml, "MaxKeys", String.valueOf(maxKeys));

        if (marker == null) {
            xml.writeEmptyElement("Marker");
        } else {
            writeSimpleElement(xml, "Marker", encodeBlob(encodingType, marker));
        }

        if (delimiter != null) {
            writeSimpleElement(xml, "Delimiter", encodeBlob(encodingType, delimiter));
        }

        if (encodingType != null && encodingType.equals("url")) {
            writeSimpleElement(xml, "EncodingType", encodingType);
        }

        String nextMarker = set.getNextMarker();
        if (nextMarker != null) {
            writeSimpleElement(xml, "IsTruncated", "true");
            writeSimpleElement(xml, "NextMarker", encodeBlob(encodingType, nextMarker));
            if (Quirks.OPAQUE_MARKERS.contains(blobStoreType)) {
                lastKeyToMarker.put(Maps.immutableEntry(containerName, Iterables.getLast(set).getName()),
                        nextMarker);
            }
        } else {
            writeSimpleElement(xml, "IsTruncated", "false");
        }

        Set<String> commonPrefixes = new TreeSet<>();
        for (StorageMetadata metadata : set) {
            switch (metadata.getType()) {
            case FOLDER:
                continue;
            case RELATIVE_PATH:
                commonPrefixes.add(metadata.getName());
                continue;
            default:
                break;
            }

            xml.writeStartElement("Contents");

            writeSimpleElement(xml, "Key", encodeBlob(encodingType, metadata.getName()));

            Date lastModified = metadata.getLastModified();
            if (lastModified != null) {
                writeSimpleElement(xml, "LastModified", formatDate(lastModified));
            }

            String eTag = metadata.getETag();
            if (eTag != null) {
                writeSimpleElement(xml, "ETag", maybeQuoteETag(eTag));
            }

            writeSimpleElement(xml, "Size", String.valueOf(metadata.getSize()));
            writeSimpleElement(xml, "StorageClass", "STANDARD");

            writeOwnerStanza(xml);

            xml.writeEndElement();
        }

        for (String commonPrefix : commonPrefixes) {
            xml.writeStartElement("CommonPrefixes");
            writeSimpleElement(xml, "Prefix", encodeBlob(encodingType, commonPrefix));
            xml.writeEndElement();
        }

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}
From source file:org.gaul.s3proxy.S3ProxyHandler.java
private void handleInitiateMultipartUpload(HttpServletRequest request, HttpServletResponse response,
        BlobStore blobStore, String containerName, String blobName) throws IOException, S3Exception {
    ByteSource payload = ByteSource.empty();
    BlobBuilder.PayloadBlobBuilder builder = blobStore.blobBuilder(blobName).payload(payload);
    addContentMetdataFromHttpRequest(builder, request);
    builder.contentLength(payload.size());

    BlobAccess access;
    String cannedAcl = request.getHeader("x-amz-acl");
    if (cannedAcl == null || cannedAcl.equalsIgnoreCase("private")) {
        access = BlobAccess.PRIVATE;
    } else if (cannedAcl.equalsIgnoreCase("public-read")) {
        access = BlobAccess.PUBLIC_READ;
    } else if (CANNED_ACLS.contains(cannedAcl)) {
        throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
    } else {
        response.sendError(HttpServletResponse.SC_BAD_REQUEST);
        return;
    }
    PutOptions options = new PutOptions().setBlobAccess(access);

    MultipartUpload mpu = blobStore.initiateMultipartUpload(containerName, builder.build().getMetadata(),
            options);

    if (Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType(blobStore))) {
        blobStore.putBlob(containerName, builder.name(mpu.id()).build(), options);
    }

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("InitiateMultipartUploadResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        writeSimpleElement(xml, "Bucket", containerName);
        writeSimpleElement(xml, "Key", blobName);
        writeSimpleElement(xml, "UploadId", mpu.id());

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}
From source file:org.gaul.s3proxy.S3ProxyHandler.java
private void handleCompleteMultipartUpload(HttpServletResponse response, InputStream is, BlobStore blobStore,
        String containerName, String blobName, String uploadId) throws IOException, S3Exception {
    MultipartUpload mpu;
    if (Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType(blobStore))) {
        Blob stubBlob = blobStore.getBlob(containerName, uploadId);
        BlobAccess access = blobStore.getBlobAccess(containerName, uploadId);
        mpu = MultipartUpload.create(containerName, blobName, uploadId, stubBlob.getMetadata(),
                new PutOptions().setBlobAccess(access));
    } else {
        mpu = MultipartUpload.create(containerName, blobName, uploadId, new MutableBlobMetadataImpl(),
                new PutOptions());
    }

    // List parts to get part sizes and to map multiple Azure parts
    // into single parts.
    ImmutableMap.Builder<Integer, MultipartPart> builder = ImmutableMap.builder();
    for (MultipartPart part : blobStore.listMultipartUpload(mpu)) {
        builder.put(part.partNumber(), part);
    }
    ImmutableMap<Integer, MultipartPart> partsByListing = builder.build();

    List<MultipartPart> parts = new ArrayList<>();
    String blobStoreType = getBlobStoreType(blobStore);
    if (blobStoreType.equals("azureblob")) {
        // TODO: how to sanity check parts?
        for (MultipartPart part : blobStore.listMultipartUpload(mpu)) {
            parts.add(part);
        }
    } else {
        CompleteMultipartUploadRequest cmu = new XmlMapper().readValue(is,
                CompleteMultipartUploadRequest.class);
        // use TreeMap to allow runt last part
        SortedMap<Integer, String> requestParts = new TreeMap<>();
        if (cmu.parts != null) {
            for (CompleteMultipartUploadRequest.Part part : cmu.parts) {
                requestParts.put(part.partNumber, part.eTag);
            }
        }
        for (Iterator<Map.Entry<Integer, String>> it = requestParts.entrySet().iterator(); it.hasNext();) {
            Map.Entry<Integer, String> entry = it.next();
            MultipartPart part = partsByListing.get(entry.getKey());
            if (part == null) {
                throw new S3Exception(S3ErrorCode.INVALID_PART);
            }
            long partSize = part.partSize();
            if (partSize < blobStore.getMinimumMultipartPartSize() && partSize != -1 && it.hasNext()) {
                throw new S3Exception(S3ErrorCode.ENTITY_TOO_SMALL);
            }
            if (part.partETag() != null
                    && !equalsIgnoringSurroundingQuotes(part.partETag(), entry.getValue())) {
                throw new S3Exception(S3ErrorCode.INVALID_PART);
            }
            parts.add(MultipartPart.create(entry.getKey(), partSize, part.partETag(), part.lastModified()));
        }
    }

    if (parts.isEmpty()) {
        // Amazon requires at least one part
        throw new S3Exception(S3ErrorCode.MALFORMED_X_M_L);
    }

    String eTag = blobStore.completeMultipartUpload(mpu, parts);

    if (Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType(blobStore))) {
        blobStore.removeBlob(containerName, uploadId);
    }

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("CompleteMultipartUploadResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        // TODO: bogus value
        writeSimpleElement(xml, "Location", "http://Example-Bucket.s3.amazonaws.com/" + blobName);

        writeSimpleElement(xml, "Bucket", containerName);
        writeSimpleElement(xml, "Key", blobName);

        if (eTag != null) {
            writeSimpleElement(xml, "ETag", maybeQuoteETag(eTag));
        }

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}
From source file:org.gaul.s3proxy.S3ProxyHandler.java
private void handleListParts(HttpServletRequest request, HttpServletResponse response, BlobStore blobStore,
        String containerName, String blobName, String uploadId) throws IOException, S3Exception {
    // support only the no-op zero case
    String partNumberMarker = request.getParameter("part-number-marker");
    if (partNumberMarker != null && !partNumberMarker.equals("0")) {
        throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
    }

    // TODO: how to reconstruct original mpu?
    MultipartUpload mpu = MultipartUpload.create(containerName, blobName, uploadId,
            createFakeBlobMetadata(blobStore), new PutOptions());

    List<MultipartPart> parts;
    if (getBlobStoreType(blobStore).equals("azureblob")) {
        // map Azure subparts back into S3 parts
        SortedMap<Integer, Long> map = new TreeMap<>();
        for (MultipartPart part : blobStore.listMultipartUpload(mpu)) {
            int virtualPartNumber = part.partNumber() / 10_000;
            Long size = map.get(virtualPartNumber);
            map.put(virtualPartNumber, (size == null ? 0L : (long) size) + part.partSize());
        }
        parts = new ArrayList<>();
        for (Map.Entry<Integer, Long> entry : map.entrySet()) {
            String eTag = ""; // TODO: bogus value
            Date lastModified = null; // TODO: bogus value
            parts.add(MultipartPart.create(entry.getKey(), entry.getValue(), eTag, lastModified));
        }
    } else {
        parts = blobStore.listMultipartUpload(mpu);
    }

    String encodingType = request.getParameter("encoding-type");

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("ListPartsResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        if (encodingType != null && encodingType.equals("url")) {
            writeSimpleElement(xml, "EncodingType", encodingType);
        }

        writeSimpleElement(xml, "Bucket", containerName);
        writeSimpleElement(xml, "Key", encodeBlob(encodingType, blobName));
        writeSimpleElement(xml, "UploadId", uploadId);
        writeInitiatorStanza(xml);
        writeOwnerStanza(xml);
        writeSimpleElement(xml, "StorageClass", "STANDARD");

        // TODO: pagination
        /*
        writeSimpleElement(xml, "PartNumberMarker", "1");
        writeSimpleElement(xml, "NextPartNumberMarker", "3");
        writeSimpleElement(xml, "MaxParts", "2");
        writeSimpleElement(xml, "IsTruncated", "true");
        */

        for (MultipartPart part : parts) {
            xml.writeStartElement("Part");

            writeSimpleElement(xml, "PartNumber", String.valueOf(part.partNumber()));

            Date lastModified = part.lastModified();
            if (lastModified != null) {
                writeSimpleElement(xml, "LastModified", formatDate(lastModified));
            }

            String eTag = part.partETag();
            if (eTag != null) {
                writeSimpleElement(xml, "ETag", maybeQuoteETag(eTag));
            }

            writeSimpleElement(xml, "Size", String.valueOf(part.partSize()));

            xml.writeEndElement();
        }

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}
From source file:org.gaul.s3proxy.S3ProxyHandler.java
private void handleCopyPart(HttpServletRequest request, HttpServletResponse response, BlobStore blobStore,
        String containerName, String blobName, String uploadId) throws IOException, S3Exception {
    // TODO: duplicated from handlePutBlob
    String copySourceHeader = request.getHeader("x-amz-copy-source");
    copySourceHeader = URLDecoder.decode(copySourceHeader, "UTF-8");
    if (copySourceHeader.startsWith("/")) {
        // Some clients like boto do not include the leading slash
        copySourceHeader = copySourceHeader.substring(1);
    }
    String[] path = copySourceHeader.split("/", 2);
    if (path.length != 2) {
        throw new S3Exception(S3ErrorCode.INVALID_REQUEST);
    }
    String sourceContainerName = path[0];
    String sourceBlobName = path[1];

    GetOptions options = new GetOptions();
    String range = request.getHeader("x-amz-copy-source-range");
    if (range != null && range.startsWith("bytes=") &&
            // ignore multiple ranges
            range.indexOf(',') == -1) {
        range = range.substring("bytes=".length());
        String[] ranges = range.split("-", 2);
        if (ranges[0].isEmpty()) {
            options.tail(Long.parseLong(ranges[1]));
        } else if (ranges[1].isEmpty()) {
            options.startAt(Long.parseLong(ranges[0]));
        } else {
            options.range(Long.parseLong(ranges[0]), Long.parseLong(ranges[1]));
        }
    }

    String partNumberString = request.getParameter("partNumber");
    if (partNumberString == null) {
        throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT);
    }
    int partNumber;
    try {
        partNumber = Integer.parseInt(partNumberString);
    } catch (NumberFormatException nfe) {
        throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT,
                "Part number must be an integer between 1 and 10000" + ", inclusive", nfe,
                ImmutableMap.of("ArgumentName", "partNumber", "ArgumentValue", partNumberString));
    }
    if (partNumber < 1 || partNumber > 10_000) {
        throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT,
                "Part number must be an integer between 1 and 10000" + ", inclusive", (Throwable) null,
                ImmutableMap.of("ArgumentName", "partNumber", "ArgumentValue", partNumberString));
    }

    // TODO: how to reconstruct original mpu?
    MultipartUpload mpu = MultipartUpload.create(containerName, blobName, uploadId,
            createFakeBlobMetadata(blobStore), new PutOptions());

    Blob blob = blobStore.getBlob(sourceContainerName, sourceBlobName, options);
    if (blob == null) {
        throw new S3Exception(S3ErrorCode.NO_SUCH_KEY);
    }

    BlobMetadata blobMetadata = blob.getMetadata();

    String ifMatch = request.getHeader("x-amz-copy-source-if-match");
    String ifNoneMatch = request.getHeader("x-amz-copy-source-if-none-match");
    long ifModifiedSince = request.getDateHeader("x-amz-copy-source-if-modified-since");
    long ifUnmodifiedSince = request.getDateHeader("x-amz-copy-source-if-unmodified-since");
    String eTag = blobMetadata.getETag();
    if (eTag != null) {
        eTag = maybeQuoteETag(eTag);
        if (ifMatch != null && !ifMatch.equals(eTag)) {
            throw new S3Exception(S3ErrorCode.PRECONDITION_FAILED);
        }
        if (ifNoneMatch != null && ifNoneMatch.equals(eTag)) {
            throw new S3Exception(S3ErrorCode.PRECONDITION_FAILED);
        }
    }

    Date lastModified = blobMetadata.getLastModified();
    if (lastModified != null) {
        if (ifModifiedSince != -1 && lastModified.compareTo(new Date(ifModifiedSince)) <= 0) {
            throw new S3Exception(S3ErrorCode.PRECONDITION_FAILED);
        }
        if (ifUnmodifiedSince != -1 && lastModified.compareTo(new Date(ifUnmodifiedSince)) >= 0) {
            throw new S3Exception(S3ErrorCode.PRECONDITION_FAILED);
        }
    }

    long contentLength = blobMetadata.getContentMetadata().getContentLength();
    String blobStoreType = getBlobStoreType(blobStore);

    try (InputStream is = blob.getPayload().openStream()) {
        if (blobStoreType.equals("azureblob")) {
            // Azure has a maximum part size of 4 MB while S3 has a minimum
            // part size of 5 MB and a maximum of 5 GB. Split a single S3
            // part into multiple Azure parts.
            long azureMaximumMultipartPartSize = blobStore.getMaximumMultipartPartSize();
            HashingInputStream his = new HashingInputStream(Hashing.md5(), is);
            for (int offset = 0, subPartNumber = 0; offset < contentLength;
                    offset += azureMaximumMultipartPartSize, ++subPartNumber) {
                Payload payload = Payloads.newInputStreamPayload(
                        new UncloseableInputStream(ByteStreams.limit(his, azureMaximumMultipartPartSize)));
                payload.getContentMetadata()
                        .setContentLength(Math.min(azureMaximumMultipartPartSize, contentLength - offset));
                blobStore.uploadMultipartPart(mpu, 10_000 * partNumber + subPartNumber, payload);
            }
            eTag = BaseEncoding.base16().lowerCase().encode(his.hash().asBytes());
        } else {
            Payload payload = Payloads.newInputStreamPayload(is);
            payload.getContentMetadata().setContentLength(contentLength);

            MultipartPart part = blobStore.uploadMultipartPart(mpu, partNumber, payload);
            eTag = part.partETag();
        }
    }

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("CopyObjectResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        writeSimpleElement(xml, "LastModified", formatDate(lastModified));
        if (eTag != null) {
            writeSimpleElement(xml, "ETag", maybeQuoteETag(eTag));
        }

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}
From source file:org.gluewine.trace.XMLTracer.java
@Override
public void after(Object o, Method m, Object[] params) {
    if (isSuppressed())
        return;

    XMLStreamWriter writer = getWriter();
    if (writer != null) {
        try {
            writer.writeEndElement();
            writer.flush();
        } catch (Throwable t) {
            ErrorLogger.log(getClass(), t);
        }
    }

    clearSuppression();
}