Example usage for javax.xml.stream XMLStreamWriter writeStartElement

List of usage examples for javax.xml.stream XMLStreamWriter writeStartElement

Introduction

In this page you can find the example usage for javax.xml.stream XMLStreamWriter writeStartElement.

Prototype

public void writeStartElement(String localName) throws XMLStreamException;

Source Link

Document

Writes a start tag to the output.

Usage

From source file:org.flowable.cmmn.converter.util.CmmnXmlUtil.java

/**
 * Serializes all extension elements of the given base element, emitting the
 * enclosing extensions start tag first if it has not been written yet.
 *
 * @param baseElement                   element whose extension elements are written
 * @param didWriteExtensionStartElement whether the extensions start tag was already emitted
 * @param namespaceMap                  prefix-to-namespace map shared across the write; may be null
 * @param xtw                           target stream writer
 * @return true if the extensions start tag has been written (by this call or a previous one)
 * @throws Exception if writing to the stream fails
 */
public static boolean writeExtensionElements(BaseElement baseElement, boolean didWriteExtensionStartElement,
        Map<String, String> namespaceMap, XMLStreamWriter xtw) throws Exception {
    if (baseElement.getExtensionElements().isEmpty()) {
        return didWriteExtensionStartElement;
    }

    if (!didWriteExtensionStartElement) {
        xtw.writeStartElement(ELEMENT_EXTENSIONS);
        didWriteExtensionStartElement = true;
    }

    // Ensure downstream helpers always see a usable (possibly empty) map.
    Map<String, String> namespaces = namespaceMap != null ? namespaceMap : new HashMap<>();

    for (List<ExtensionElement> elementGroup : baseElement.getExtensionElements().values()) {
        for (ExtensionElement element : elementGroup) {
            writeExtensionElement(element, namespaces, xtw);
        }
    }
    return didWriteExtensionStartElement;
}

From source file:org.gatein.management.rest.FailureResponse.java

/**
 * Serializes this failure response to the given stream as an XML document:
 * a {@code <failureResult>} root containing {@code <failure>} (the failure
 * description) and {@code <operationName>} elements.
 *
 * @param out    destination stream; not closed by this method
 * @param pretty whether to emit newlines/indentation via the nl/indent helpers
 * @throws IOException if the writer cannot be created or writing fails
 */
private void writeXml(OutputStream out, boolean pretty) throws IOException {
    XMLStreamWriter writer;
    try {
        writer = XMLOutputFactory.newInstance().createXMLStreamWriter(out);
    } catch (XMLStreamException e) {
        throw new IOException("Could not create XML streaming writer.", e);
    }

    try {
        writer.writeStartDocument("UTF-8", "1.0");
        // root element <failureResult>
        nl(writer, pretty);
        writer.writeStartElement("failureResult");
        nl(writer, pretty);
        indent(writer, 1, pretty);

        // <failure>
        writer.writeStartElement("failure");
        writer.writeCharacters(outcome.getFailureDescription());
        writer.writeEndElement();
        nl(writer, pretty);
        indent(writer, 1, pretty);

        // <operationName>
        writer.writeStartElement("operationName");
        writer.writeCharacters(operationName);
        writer.writeEndElement();
        nl(writer, pretty);

        // </failureResult>
        writer.writeEndElement();

        // End document
        writer.writeCharacters("\n");
        writer.writeEndDocument();
        writer.flush();
    } catch (XMLStreamException e) {
        throw new IOException("Exception writing failure response to XML. Failure response message was '"
                + outcome.getFailureDescription() + "'", e);
    } finally {
        try {
            // Free the writer's resources. Per the StAX contract,
            // XMLStreamWriter.close() does NOT close the underlying output
            // stream, so 'out' remains usable by the caller.
            writer.close();
        } catch (XMLStreamException e) {
            // Best-effort cleanup: the document was already flushed (or the
            // primary exception is propagating), so ignore close failures.
        }
    }
}

From source file:org.gaul.s3proxy.S3ProxyHandler.java

/**
 * Handles the S3 "List Objects" (GET Bucket) operation: lists the container's
 * blobs via the backing blob store and renders them as a
 * {@code ListBucketResult} XML document on the response.
 *
 * Query parameters honored: encoding-type, delimiter, prefix, marker and
 * max-keys (capped at 1000, the S3 maximum).
 *
 * @param request       incoming S3-style request
 * @param response      response the XML listing is written to
 * @param blobStore     backing store to list
 * @param containerName bucket/container to list
 * @throws IOException on response-write failures
 * @throws S3Exception for invalid arguments (e.g. non-numeric max-keys)
 */
private void handleBlobList(HttpServletRequest request, HttpServletResponse response, BlobStore blobStore,
        String containerName) throws IOException, S3Exception {
    String blobStoreType = getBlobStoreType(blobStore);
    ListContainerOptions options = new ListContainerOptions();
    String encodingType = request.getParameter("encoding-type");
    String delimiter = request.getParameter("delimiter");
    // Without a delimiter S3 lists every key, which maps to a recursive listing.
    if (delimiter != null) {
        options.delimiter(delimiter);
    } else {
        options.recursive();
    }
    String prefix = request.getParameter("prefix");
    if (prefix != null && !prefix.isEmpty()) {
        options.prefix(prefix);
    }
    String marker = request.getParameter("marker");
    if (marker != null) {
        // Some backends use opaque continuation tokens instead of key names;
        // translate the last-seen key back to the stored backend marker.
        if (Quirks.OPAQUE_MARKERS.contains(blobStoreType)) {
            String realMarker = lastKeyToMarker.getIfPresent(Maps.immutableEntry(containerName, marker));
            if (realMarker != null) {
                marker = realMarker;
            }
        }
        options.afterMarker(marker);
    }
    // S3 default and maximum page size is 1000 keys.
    int maxKeys = 1000;
    String maxKeysString = request.getParameter("max-keys");
    if (maxKeysString != null) {
        try {
            maxKeys = Integer.parseInt(maxKeysString);
        } catch (NumberFormatException nfe) {
            throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, nfe);
        }
        if (maxKeys > 1000) {
            maxKeys = 1000;
        }
    }
    options.maxResults(maxKeys);

    response.setCharacterEncoding("UTF-8");

    PageSet<? extends StorageMetadata> set = blobStore.list(containerName, options);

    try (Writer writer = response.getWriter()) {
        response.setStatus(HttpServletResponse.SC_OK);
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("ListBucketResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        writeSimpleElement(xml, "Name", containerName);

        if (prefix == null) {
            xml.writeEmptyElement("Prefix");
        } else {
            writeSimpleElement(xml, "Prefix", encodeBlob(encodingType, prefix));
        }

        writeSimpleElement(xml, "MaxKeys", String.valueOf(maxKeys));

        if (marker == null) {
            xml.writeEmptyElement("Marker");
        } else {
            writeSimpleElement(xml, "Marker", encodeBlob(encodingType, marker));
        }

        if (delimiter != null) {
            writeSimpleElement(xml, "Delimiter", encodeBlob(encodingType, delimiter));
        }

        if (encodingType != null && encodingType.equals("url")) {
            writeSimpleElement(xml, "EncodingType", encodingType);
        }

        // A next marker implies a truncated listing; remember the mapping from
        // the last emitted key to the backend marker for the follow-up request.
        String nextMarker = set.getNextMarker();
        if (nextMarker != null) {
            writeSimpleElement(xml, "IsTruncated", "true");
            writeSimpleElement(xml, "NextMarker", encodeBlob(encodingType, nextMarker));
            if (Quirks.OPAQUE_MARKERS.contains(blobStoreType)) {
                lastKeyToMarker.put(Maps.immutableEntry(containerName, Iterables.getLast(set).getName()),
                        nextMarker);
            }
        } else {
            writeSimpleElement(xml, "IsTruncated", "false");
        }

        // Folders are skipped; relative paths become <CommonPrefixes> entries
        // (TreeSet gives the sorted order S3 clients expect).
        Set<String> commonPrefixes = new TreeSet<>();
        for (StorageMetadata metadata : set) {
            switch (metadata.getType()) {
            case FOLDER:
                continue;
            case RELATIVE_PATH:
                commonPrefixes.add(metadata.getName());
                continue;
            default:
                break;
            }

            xml.writeStartElement("Contents");

            writeSimpleElement(xml, "Key", encodeBlob(encodingType, metadata.getName()));

            Date lastModified = metadata.getLastModified();
            if (lastModified != null) {
                writeSimpleElement(xml, "LastModified", formatDate(lastModified));
            }

            String eTag = metadata.getETag();
            if (eTag != null) {
                writeSimpleElement(xml, "ETag", maybeQuoteETag(eTag));
            }

            writeSimpleElement(xml, "Size", String.valueOf(metadata.getSize()));
            writeSimpleElement(xml, "StorageClass", "STANDARD");

            writeOwnerStanza(xml);

            xml.writeEndElement();
        }

        for (String commonPrefix : commonPrefixes) {
            xml.writeStartElement("CommonPrefixes");

            writeSimpleElement(xml, "Prefix", encodeBlob(encodingType, commonPrefix));

            xml.writeEndElement();
        }

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}

From source file:org.gaul.s3proxy.S3ProxyHandler.java

/**
 * Handles the S3 "Initiate Multipart Upload" operation: creates a new
 * multipart upload for the given blob and returns its upload id as an
 * {@code InitiateMultipartUploadResult} XML document.
 *
 * @param request       incoming S3-style request (content metadata and ACL headers)
 * @param response      response the XML result is written to
 * @param blobStore     backing store
 * @param containerName bucket/container name
 * @param blobName      key of the blob being uploaded
 * @throws IOException on response-write failures
 * @throws S3Exception if the requested canned ACL is recognized but unsupported
 */
private void handleInitiateMultipartUpload(HttpServletRequest request, HttpServletResponse response,
        BlobStore blobStore, String containerName, String blobName) throws IOException, S3Exception {
    // The initiate request carries no content; parts supply the data later.
    ByteSource payload = ByteSource.empty();
    BlobBuilder.PayloadBlobBuilder builder = blobStore.blobBuilder(blobName).payload(payload);
    addContentMetdataFromHttpRequest(builder, request);
    builder.contentLength(payload.size());

    // Map the S3 canned ACL header onto jclouds blob access: missing or
    // "private" -> PRIVATE, "public-read" -> PUBLIC_READ, any other known
    // canned ACL is unsupported, anything else is a bad request.
    BlobAccess access;
    String cannedAcl = request.getHeader("x-amz-acl");
    if (cannedAcl == null || cannedAcl.equalsIgnoreCase("private")) {
        access = BlobAccess.PRIVATE;
    } else if (cannedAcl.equalsIgnoreCase("public-read")) {
        access = BlobAccess.PUBLIC_READ;
    } else if (CANNED_ACLS.contains(cannedAcl)) {
        throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
    } else {
        response.sendError(HttpServletResponse.SC_BAD_REQUEST);
        return;
    }
    PutOptions options = new PutOptions().setBlobAccess(access);

    MultipartUpload mpu = blobStore.initiateMultipartUpload(containerName, builder.build().getMetadata(),
            options);

    // Some backends need a stub blob (named after the upload id) so the
    // upload's metadata survives until completion.
    if (Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType(blobStore))) {
        blobStore.putBlob(containerName, builder.name(mpu.id()).build(), options);
    }

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("InitiateMultipartUploadResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        writeSimpleElement(xml, "Bucket", containerName);
        writeSimpleElement(xml, "Key", blobName);
        writeSimpleElement(xml, "UploadId", mpu.id());

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}

From source file:org.gaul.s3proxy.S3ProxyHandler.java

/**
 * Handles the S3 "Complete Multipart Upload" operation: validates the parts
 * named in the request body against the parts actually uploaded, completes
 * the upload, and returns a {@code CompleteMultipartUploadResult} document.
 *
 * @param response      response the XML result is written to
 * @param is            request body containing the CompleteMultipartUpload XML
 * @param blobStore     backing store
 * @param containerName bucket/container name
 * @param blobName      key of the blob being assembled
 * @param uploadId      multipart upload identifier
 * @throws IOException on response-write failures
 * @throws S3Exception for invalid, undersized or missing parts
 */
private void handleCompleteMultipartUpload(HttpServletResponse response, InputStream is, BlobStore blobStore,
        String containerName, String blobName, String uploadId) throws IOException, S3Exception {
    // Reconstruct the MultipartUpload handle; stub-requiring backends stored
    // the original metadata and access in a placeholder blob at initiate time.
    MultipartUpload mpu;
    if (Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType(blobStore))) {
        Blob stubBlob = blobStore.getBlob(containerName, uploadId);
        BlobAccess access = blobStore.getBlobAccess(containerName, uploadId);
        mpu = MultipartUpload.create(containerName, blobName, uploadId, stubBlob.getMetadata(),
                new PutOptions().setBlobAccess(access));
    } else {
        mpu = MultipartUpload.create(containerName, blobName, uploadId, new MutableBlobMetadataImpl(),
                new PutOptions());
    }

    // List parts to get part sizes and to map multiple Azure parts
    // into single parts.
    ImmutableMap.Builder<Integer, MultipartPart> builder = ImmutableMap.builder();
    for (MultipartPart part : blobStore.listMultipartUpload(mpu)) {
        builder.put(part.partNumber(), part);
    }
    ImmutableMap<Integer, MultipartPart> partsByListing = builder.build();

    List<MultipartPart> parts = new ArrayList<>();
    String blobStoreType = getBlobStoreType(blobStore);
    if (blobStoreType.equals("azureblob")) {
        // TODO: how to sanity check parts?
        for (MultipartPart part : blobStore.listMultipartUpload(mpu)) {
            parts.add(part);
        }
    } else {
        CompleteMultipartUploadRequest cmu = new XmlMapper().readValue(is,
                CompleteMultipartUploadRequest.class);
        // use TreeMap to allow runt last part
        SortedMap<Integer, String> requestParts = new TreeMap<>();
        if (cmu.parts != null) {
            for (CompleteMultipartUploadRequest.Part part : cmu.parts) {
                requestParts.put(part.partNumber, part.eTag);
            }
        }
        // Validate each requested part: it must exist, meet the minimum size
        // (except the last part), and its ETag must match the uploaded part.
        for (Iterator<Map.Entry<Integer, String>> it = requestParts.entrySet().iterator(); it.hasNext();) {
            Map.Entry<Integer, String> entry = it.next();
            MultipartPart part = partsByListing.get(entry.getKey());
            if (part == null) {
                throw new S3Exception(S3ErrorCode.INVALID_PART);
            }
            long partSize = part.partSize();
            if (partSize < blobStore.getMinimumMultipartPartSize() && partSize != -1 && it.hasNext()) {
                throw new S3Exception(S3ErrorCode.ENTITY_TOO_SMALL);
            }
            if (part.partETag() != null
                    && !equalsIgnoringSurroundingQuotes(part.partETag(), entry.getValue())) {
                throw new S3Exception(S3ErrorCode.INVALID_PART);
            }
            parts.add(MultipartPart.create(entry.getKey(), partSize, part.partETag(), part.lastModified()));
        }
    }

    if (parts.isEmpty()) {
        // Amazon requires at least one part
        throw new S3Exception(S3ErrorCode.MALFORMED_X_M_L);
    }

    String eTag = blobStore.completeMultipartUpload(mpu, parts);

    // The placeholder blob created at initiate time is no longer needed.
    if (Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType(blobStore))) {
        blobStore.removeBlob(containerName, uploadId);
    }

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("CompleteMultipartUploadResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        // TODO: bogus value
        writeSimpleElement(xml, "Location", "http://Example-Bucket.s3.amazonaws.com/" + blobName);

        writeSimpleElement(xml, "Bucket", containerName);
        writeSimpleElement(xml, "Key", blobName);

        if (eTag != null) {
            writeSimpleElement(xml, "ETag", maybeQuoteETag(eTag));
        }

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}

From source file:org.gaul.s3proxy.S3ProxyHandler.java

/**
 * Handles the S3 "List Parts" operation: lists the parts uploaded so far for
 * a multipart upload and renders them as a {@code ListPartsResult} document.
 * Pagination is not implemented; only part-number-marker=0 is accepted.
 *
 * @param request       incoming S3-style request
 * @param response      response the XML result is written to
 * @param blobStore     backing store
 * @param containerName bucket/container name
 * @param blobName      key of the blob being uploaded
 * @param uploadId      multipart upload identifier
 * @throws IOException on response-write failures
 * @throws S3Exception if a non-zero part-number-marker is requested
 */
private void handleListParts(HttpServletRequest request, HttpServletResponse response, BlobStore blobStore,
        String containerName, String blobName, String uploadId) throws IOException, S3Exception {
    // support only the no-op zero case
    String partNumberMarker = request.getParameter("part-number-marker");
    if (partNumberMarker != null && !partNumberMarker.equals("0")) {
        throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
    }

    // TODO: how to reconstruct original mpu?
    MultipartUpload mpu = MultipartUpload.create(containerName, blobName, uploadId,
            createFakeBlobMetadata(blobStore), new PutOptions());

    List<MultipartPart> parts;
    if (getBlobStoreType(blobStore).equals("azureblob")) {
        // map Azure subparts back into S3 parts: subparts were numbered
        // partNumber * 10_000 + offset at upload time, so dividing by 10_000
        // recovers the S3 part number and the sizes are summed per part.
        SortedMap<Integer, Long> map = new TreeMap<>();
        for (MultipartPart part : blobStore.listMultipartUpload(mpu)) {
            int virtualPartNumber = part.partNumber() / 10_000;
            Long size = map.get(virtualPartNumber);
            map.put(virtualPartNumber, (size == null ? 0L : (long) size) + part.partSize());
        }
        parts = new ArrayList<>();
        for (Map.Entry<Integer, Long> entry : map.entrySet()) {
            String eTag = ""; // TODO: bogus value
            Date lastModified = null; // TODO: bogus value
            parts.add(MultipartPart.create(entry.getKey(), entry.getValue(), eTag, lastModified));
        }
    } else {
        parts = blobStore.listMultipartUpload(mpu);
    }

    String encodingType = request.getParameter("encoding-type");

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("ListPartsResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        if (encodingType != null && encodingType.equals("url")) {
            writeSimpleElement(xml, "EncodingType", encodingType);
        }

        writeSimpleElement(xml, "Bucket", containerName);
        writeSimpleElement(xml, "Key", encodeBlob(encodingType, blobName));
        writeSimpleElement(xml, "UploadId", uploadId);
        writeInitiatorStanza(xml);
        writeOwnerStanza(xml);
        writeSimpleElement(xml, "StorageClass", "STANDARD");

        // TODO: pagination
        /*
                    writeSimpleElement(xml, "PartNumberMarker", "1");
                    writeSimpleElement(xml, "NextPartNumberMarker", "3");
                    writeSimpleElement(xml, "MaxParts", "2");
                    writeSimpleElement(xml, "IsTruncated", "true");
        */

        for (MultipartPart part : parts) {
            xml.writeStartElement("Part");

            writeSimpleElement(xml, "PartNumber", String.valueOf(part.partNumber()));

            Date lastModified = part.lastModified();
            if (lastModified != null) {
                writeSimpleElement(xml, "LastModified", formatDate(lastModified));
            }

            String eTag = part.partETag();
            if (eTag != null) {
                writeSimpleElement(xml, "ETag", maybeQuoteETag(eTag));
            }

            writeSimpleElement(xml, "Size", String.valueOf(part.partSize()));

            xml.writeEndElement();
        }

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}

From source file:org.gaul.s3proxy.S3ProxyHandler.java

/**
 * Handles the S3 "Upload Part - Copy" operation: copies (a byte range of)
 * an existing blob into one part of an in-progress multipart upload and
 * returns a {@code CopyObjectResult} document with the part's ETag.
 *
 * @param request       incoming S3-style request (copy-source and precondition headers)
 * @param response      response the XML result is written to
 * @param blobStore     backing store
 * @param containerName destination bucket/container
 * @param blobName      destination blob key
 * @param uploadId      multipart upload identifier
 * @throws IOException on stream failures
 * @throws S3Exception for malformed requests, failed preconditions or a missing source
 */
private void handleCopyPart(HttpServletRequest request, HttpServletResponse response, BlobStore blobStore,
        String containerName, String blobName, String uploadId) throws IOException, S3Exception {
    // TODO: duplicated from handlePutBlob
    String copySourceHeader = request.getHeader("x-amz-copy-source");
    copySourceHeader = URLDecoder.decode(copySourceHeader, "UTF-8");
    if (copySourceHeader.startsWith("/")) {
        // Some clients like boto do not include the leading slash
        copySourceHeader = copySourceHeader.substring(1);
    }
    // Source is "container/blob"; the blob part may itself contain slashes.
    String[] path = copySourceHeader.split("/", 2);
    if (path.length != 2) {
        throw new S3Exception(S3ErrorCode.INVALID_REQUEST);
    }
    String sourceContainerName = path[0];
    String sourceBlobName = path[1];

    // Parse an optional single byte range ("bytes=a-b", "bytes=a-", "bytes=-n").
    GetOptions options = new GetOptions();
    String range = request.getHeader("x-amz-copy-source-range");
    if (range != null && range.startsWith("bytes=") &&
    // ignore multiple ranges
            range.indexOf(',') == -1) {
        range = range.substring("bytes=".length());
        String[] ranges = range.split("-", 2);
        if (ranges[0].isEmpty()) {
            options.tail(Long.parseLong(ranges[1]));
        } else if (ranges[1].isEmpty()) {
            options.startAt(Long.parseLong(ranges[0]));
        } else {
            options.range(Long.parseLong(ranges[0]), Long.parseLong(ranges[1]));
        }
    }

    String partNumberString = request.getParameter("partNumber");
    if (partNumberString == null) {
        throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT);
    }
    int partNumber;
    try {
        partNumber = Integer.parseInt(partNumberString);
    } catch (NumberFormatException nfe) {
        throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT,
                "Part number must be an integer between 1 and 10000" + ", inclusive", nfe,
                ImmutableMap.of("ArgumentName", "partNumber", "ArgumentValue", partNumberString));
    }
    if (partNumber < 1 || partNumber > 10_000) {
        throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT,
                "Part number must be an integer between 1 and 10000" + ", inclusive", (Throwable) null,
                ImmutableMap.of("ArgumentName", "partNumber", "ArgumentValue", partNumberString));
    }

    // TODO: how to reconstruct original mpu?
    MultipartUpload mpu = MultipartUpload.create(containerName, blobName, uploadId,
            createFakeBlobMetadata(blobStore), new PutOptions());

    Blob blob = blobStore.getBlob(sourceContainerName, sourceBlobName, options);
    if (blob == null) {
        throw new S3Exception(S3ErrorCode.NO_SUCH_KEY);
    }

    BlobMetadata blobMetadata = blob.getMetadata();

    // Conditional-copy preconditions.
    // BUG FIX: the if-none-match and if-modified-since header names were
    // previously swapped between these two reads, so both preconditions
    // always read the wrong header and could never match their values.
    String ifMatch = request.getHeader("x-amz-copy-source-if-match");
    String ifNoneMatch = request.getHeader("x-amz-copy-source-if-none-match");
    long ifModifiedSince = request.getDateHeader("x-amz-copy-source-if-modified-since");
    long ifUnmodifiedSince = request.getDateHeader("x-amz-copy-source-if-unmodified-since");
    String eTag = blobMetadata.getETag();
    if (eTag != null) {
        eTag = maybeQuoteETag(eTag);
        if (ifMatch != null && !ifMatch.equals(eTag)) {
            throw new S3Exception(S3ErrorCode.PRECONDITION_FAILED);
        }
        if (ifNoneMatch != null && ifNoneMatch.equals(eTag)) {
            throw new S3Exception(S3ErrorCode.PRECONDITION_FAILED);
        }
    }

    Date lastModified = blobMetadata.getLastModified();
    if (lastModified != null) {
        if (ifModifiedSince != -1 && lastModified.compareTo(new Date(ifModifiedSince)) <= 0) {
            throw new S3Exception(S3ErrorCode.PRECONDITION_FAILED);
        }
        if (ifUnmodifiedSince != -1 && lastModified.compareTo(new Date(ifUnmodifiedSince)) >= 0) {
            throw new S3Exception(S3ErrorCode.PRECONDITION_FAILED);
        }
    }

    long contentLength = blobMetadata.getContentMetadata().getContentLength();

    String blobStoreType = getBlobStoreType(blobStore);
    try (InputStream is = blob.getPayload().openStream()) {
        if (blobStoreType.equals("azureblob")) {
            // Azure has a maximum part size of 4 MB while S3 has a minimum
            // part size of 5 MB and a maximum of 5 GB.  Split a single S3
            // part multiple Azure parts.
            long azureMaximumMultipartPartSize = blobStore.getMaximumMultipartPartSize();
            HashingInputStream his = new HashingInputStream(Hashing.md5(), is);
            for (int offset = 0, subPartNumber = 0; offset < contentLength; offset += azureMaximumMultipartPartSize, ++subPartNumber) {
                Payload payload = Payloads.newInputStreamPayload(
                        new UncloseableInputStream(ByteStreams.limit(his, azureMaximumMultipartPartSize)));
                payload.getContentMetadata()
                        .setContentLength(Math.min(azureMaximumMultipartPartSize, contentLength - offset));
                // Subpart numbering (partNumber * 10_000 + subPartNumber) lets
                // listing code recover the original S3 part number later.
                blobStore.uploadMultipartPart(mpu, 10_000 * partNumber + subPartNumber, payload);
            }
            eTag = BaseEncoding.base16().lowerCase().encode(his.hash().asBytes());
        } else {
            Payload payload = Payloads.newInputStreamPayload(is);
            payload.getContentMetadata().setContentLength(contentLength);

            MultipartPart part = blobStore.uploadMultipartPart(mpu, partNumber, payload);
            eTag = part.partETag();
        }
    }

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("CopyObjectResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        // NOTE(review): lastModified may be null here if the source metadata
        // lacked it — formatDate would then likely fail; confirm upstream.
        writeSimpleElement(xml, "LastModified", formatDate(lastModified));
        if (eTag != null) {
            writeSimpleElement(xml, "ETag", maybeQuoteETag(eTag));
        }

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}

From source file:org.gephi.statistics.StatisticsModelImpl.java

/**
 * Writes this model as a {@code <statisticsmodel>} element containing a
 * {@code <reports>} section with one {@code <report>} entry per statistics
 * class whose stored report text is non-empty.
 *
 * @param writer destination stream writer
 * @throws XMLStreamException if writing fails
 */
public void writeXML(XMLStreamWriter writer) throws XMLStreamException {
    writer.writeStartElement("statisticsmodel");

    writer.writeStartElement("reports");
    for (Map.Entry<Class, String> reportEntry : reportMap.entrySet()) {
        String reportHtml = reportEntry.getValue();
        if (reportHtml == null || reportHtml.isEmpty()) {
            continue;
        }
        writer.writeStartElement("report");
        // Inline images so the serialized report is self-contained.
        String embedded = embedImages(reportHtml);
        writer.writeAttribute("class", reportEntry.getKey().getName());
        writer.writeAttribute("value", embedded);
        writer.writeEndElement();
    }
    writer.writeEndElement(); // </reports>

    writer.writeEndElement(); // </statisticsmodel>
}

From source file:org.gluewine.trace.XMLTracer.java

/**
 * Trace hook run before a method invocation: opens a {@code <method>}
 * element carrying the class, method name and start timestamp, plus one
 * {@code <parameter>} child per argument (empty element for null arguments).
 * Tracing failures are logged and never propagated to the traced call.
 *
 * @param o      the target object being invoked
 * @param m      the method about to run
 * @param params the actual invocation arguments
 * @throws Throwable declared by the interceptor interface; not thrown here
 */
@Override
public void beforeInvocation(Object o, Method m, Object[] params) throws Throwable {
    if (isSuppressed()) {
        return;
    }

    XMLStreamWriter writer = getWriter();
    if (writer == null) {
        System.out.println("No Writer");
    } else {
        try {
            writer.writeStartElement("method");
            writer.writeAttribute("class", getClassName(o.getClass()));
            writer.writeAttribute("name", m.getName());
            writer.writeAttribute("start", format.format(new Date()));

            for (Object param : params) {
                if (param == null) {
                    writer.writeEmptyElement("parameter");
                } else {
                    writer.writeStartElement("parameter");
                    writer.writeAttribute("class", getClassName(param.getClass()));
                    writer.writeCharacters(param.toString());
                    writer.writeEndElement();
                }
            }

        } catch (Throwable e) {
            ErrorLogger.log(getClass(), e);
        }
    }

    clearSuppression();
}

From source file:org.gluewine.trace.XMLTracer.java

/**
 * Returns the writer associated with the current thread. If none exists, one is
 * created./*  w  ww  .  ja  v a 2s . c  o  m*/
 *
 * @return The writer to use.
 */
private XMLStreamWriter getWriter() {
    XMLStreamWriter writer = writers.get(Thread.currentThread());

    if (writer == null) {
        try {
            writer = xof.createXMLStreamWriter(
                    new FileWriterWithEncoding(file + "_" + Thread.currentThread().getId() + ".xml", "utf8"));
            writer.writeStartDocument("utf-8", "1.0");
            writer.writeStartElement("trace");
            writer.writeAttribute("thread", Long.toString(Thread.currentThread().getId()));
            writers.put(Thread.currentThread(), writer);
        } catch (Throwable e) {
            ErrorLogger.log(getClass(), e);
        }
    }

    return writer;
}