Usage examples for javax.xml.stream.XMLStreamWriter.writeStartDocument()
public void writeStartDocument() throws XMLStreamException;
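Before the project-specific examples below, here is a minimal, self-contained sketch of the typical call sequence: obtain a writer from an XMLOutputFactory, call writeStartDocument() to emit the XML declaration, write the element tree, then end and close the document. The overloads writeStartDocument(version) and writeStartDocument(encoding, version) let you pin the declaration explicitly; the class name here is illustrative.

import java.io.StringWriter;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;

public class WriteStartDocumentExample {
    public static void main(String[] args) throws XMLStreamException {
        StringWriter out = new StringWriter();
        XMLStreamWriter xml = XMLOutputFactory.newFactory().createXMLStreamWriter(out);
        // Emits the XML declaration; writeStartDocument("UTF-8", "1.0")
        // would set the encoding and version explicitly.
        xml.writeStartDocument();
        xml.writeStartElement("greeting");
        xml.writeCharacters("hello");
        xml.writeEndElement();
        // Closes any still-open start tags and ends the document.
        xml.writeEndDocument();
        xml.close();
        // Typically prints: <?xml version="1.0" ?><greeting>hello</greeting>
        System.out.println(out);
    }
}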
From source file:org.gaul.s3proxy.S3ProxyHandler.java
private void handleInitiateMultipartUpload(HttpServletRequest request, HttpServletResponse response,
        BlobStore blobStore, String containerName, String blobName) throws IOException, S3Exception {
    ByteSource payload = ByteSource.empty();
    BlobBuilder.PayloadBlobBuilder builder = blobStore.blobBuilder(blobName).payload(payload);
    addContentMetdataFromHttpRequest(builder, request);
    builder.contentLength(payload.size());

    BlobAccess access;
    String cannedAcl = request.getHeader("x-amz-acl");
    if (cannedAcl == null || cannedAcl.equalsIgnoreCase("private")) {
        access = BlobAccess.PRIVATE;
    } else if (cannedAcl.equalsIgnoreCase("public-read")) {
        access = BlobAccess.PUBLIC_READ;
    } else if (CANNED_ACLS.contains(cannedAcl)) {
        throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
    } else {
        response.sendError(HttpServletResponse.SC_BAD_REQUEST);
        return;
    }
    PutOptions options = new PutOptions().setBlobAccess(access);

    MultipartUpload mpu = blobStore.initiateMultipartUpload(containerName,
            builder.build().getMetadata(), options);

    if (Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType(blobStore))) {
        blobStore.putBlob(containerName, builder.name(mpu.id()).build(), options);
    }

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("InitiateMultipartUploadResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        writeSimpleElement(xml, "Bucket", containerName);
        writeSimpleElement(xml, "Key", blobName);
        writeSimpleElement(xml, "UploadId", mpu.id());

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}
From source file:org.gaul.s3proxy.S3ProxyHandler.java
private void handleCompleteMultipartUpload(HttpServletResponse response, InputStream is,
        BlobStore blobStore, String containerName, String blobName, String uploadId)
        throws IOException, S3Exception {
    MultipartUpload mpu;
    if (Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType(blobStore))) {
        Blob stubBlob = blobStore.getBlob(containerName, uploadId);
        BlobAccess access = blobStore.getBlobAccess(containerName, uploadId);
        mpu = MultipartUpload.create(containerName, blobName, uploadId,
                stubBlob.getMetadata(), new PutOptions().setBlobAccess(access));
    } else {
        mpu = MultipartUpload.create(containerName, blobName, uploadId,
                new MutableBlobMetadataImpl(), new PutOptions());
    }

    // List parts to get part sizes and to map multiple Azure parts
    // into single parts.
    ImmutableMap.Builder<Integer, MultipartPart> builder = ImmutableMap.builder();
    for (MultipartPart part : blobStore.listMultipartUpload(mpu)) {
        builder.put(part.partNumber(), part);
    }
    ImmutableMap<Integer, MultipartPart> partsByListing = builder.build();

    List<MultipartPart> parts = new ArrayList<>();
    String blobStoreType = getBlobStoreType(blobStore);
    if (blobStoreType.equals("azureblob")) {
        // TODO: how to sanity check parts?
        for (MultipartPart part : blobStore.listMultipartUpload(mpu)) {
            parts.add(part);
        }
    } else {
        CompleteMultipartUploadRequest cmu = new XmlMapper().readValue(is,
                CompleteMultipartUploadRequest.class);
        // use TreeMap to allow runt last part
        SortedMap<Integer, String> requestParts = new TreeMap<>();
        if (cmu.parts != null) {
            for (CompleteMultipartUploadRequest.Part part : cmu.parts) {
                requestParts.put(part.partNumber, part.eTag);
            }
        }
        for (Iterator<Map.Entry<Integer, String>> it = requestParts.entrySet().iterator(); it.hasNext();) {
            Map.Entry<Integer, String> entry = it.next();
            MultipartPart part = partsByListing.get(entry.getKey());
            if (part == null) {
                throw new S3Exception(S3ErrorCode.INVALID_PART);
            }
            long partSize = part.partSize();
            if (partSize < blobStore.getMinimumMultipartPartSize() && partSize != -1 && it.hasNext()) {
                throw new S3Exception(S3ErrorCode.ENTITY_TOO_SMALL);
            }
            if (part.partETag() != null &&
                    !equalsIgnoringSurroundingQuotes(part.partETag(), entry.getValue())) {
                throw new S3Exception(S3ErrorCode.INVALID_PART);
            }
            parts.add(MultipartPart.create(entry.getKey(), partSize, part.partETag(),
                    part.lastModified()));
        }
    }

    if (parts.isEmpty()) {
        // Amazon requires at least one part
        throw new S3Exception(S3ErrorCode.MALFORMED_X_M_L);
    }

    String eTag = blobStore.completeMultipartUpload(mpu, parts);

    if (Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType(blobStore))) {
        blobStore.removeBlob(containerName, uploadId);
    }

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("CompleteMultipartUploadResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        // TODO: bogus value
        writeSimpleElement(xml, "Location",
                "http://Example-Bucket.s3.amazonaws.com/" + blobName);
        writeSimpleElement(xml, "Bucket", containerName);
        writeSimpleElement(xml, "Key", blobName);
        if (eTag != null) {
            writeSimpleElement(xml, "ETag", maybeQuoteETag(eTag));
        }

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}
From source file:org.gaul.s3proxy.S3ProxyHandler.java
private void handleListParts(HttpServletRequest request, HttpServletResponse response,
        BlobStore blobStore, String containerName, String blobName, String uploadId)
        throws IOException, S3Exception {
    // support only the no-op zero case
    String partNumberMarker = request.getParameter("part-number-marker");
    if (partNumberMarker != null && !partNumberMarker.equals("0")) {
        throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
    }

    // TODO: how to reconstruct original mpu?
    MultipartUpload mpu = MultipartUpload.create(containerName, blobName, uploadId,
            createFakeBlobMetadata(blobStore), new PutOptions());

    List<MultipartPart> parts;
    if (getBlobStoreType(blobStore).equals("azureblob")) {
        // map Azure subparts back into S3 parts
        SortedMap<Integer, Long> map = new TreeMap<>();
        for (MultipartPart part : blobStore.listMultipartUpload(mpu)) {
            int virtualPartNumber = part.partNumber() / 10_000;
            Long size = map.get(virtualPartNumber);
            map.put(virtualPartNumber, (size == null ? 0L : (long) size) + part.partSize());
        }
        parts = new ArrayList<>();
        for (Map.Entry<Integer, Long> entry : map.entrySet()) {
            String eTag = "";  // TODO: bogus value
            Date lastModified = null;  // TODO: bogus value
            parts.add(MultipartPart.create(entry.getKey(), entry.getValue(), eTag, lastModified));
        }
    } else {
        parts = blobStore.listMultipartUpload(mpu);
    }

    String encodingType = request.getParameter("encoding-type");

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("ListPartsResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        if (encodingType != null && encodingType.equals("url")) {
            writeSimpleElement(xml, "EncodingType", encodingType);
        }

        writeSimpleElement(xml, "Bucket", containerName);
        writeSimpleElement(xml, "Key", encodeBlob(encodingType, blobName));
        writeSimpleElement(xml, "UploadId", uploadId);
        writeInitiatorStanza(xml);
        writeOwnerStanza(xml);
        writeSimpleElement(xml, "StorageClass", "STANDARD");

        // TODO: pagination
        /*
        writeSimpleElement(xml, "PartNumberMarker", "1");
        writeSimpleElement(xml, "NextPartNumberMarker", "3");
        writeSimpleElement(xml, "MaxParts", "2");
        writeSimpleElement(xml, "IsTruncated", "true");
        */

        for (MultipartPart part : parts) {
            xml.writeStartElement("Part");
            writeSimpleElement(xml, "PartNumber", String.valueOf(part.partNumber()));
            Date lastModified = part.lastModified();
            if (lastModified != null) {
                writeSimpleElement(xml, "LastModified", formatDate(lastModified));
            }
            String eTag = part.partETag();
            if (eTag != null) {
                writeSimpleElement(xml, "ETag", maybeQuoteETag(eTag));
            }
            writeSimpleElement(xml, "Size", String.valueOf(part.partSize()));
            xml.writeEndElement();
        }

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}
From source file:org.gaul.s3proxy.S3ProxyHandler.java
private void handleCopyPart(HttpServletRequest request, HttpServletResponse response,
        BlobStore blobStore, String containerName, String blobName, String uploadId)
        throws IOException, S3Exception {
    // TODO: duplicated from handlePutBlob
    String copySourceHeader = request.getHeader("x-amz-copy-source");
    copySourceHeader = URLDecoder.decode(copySourceHeader, "UTF-8");
    if (copySourceHeader.startsWith("/")) {
        // Some clients like boto do not include the leading slash
        copySourceHeader = copySourceHeader.substring(1);
    }
    String[] path = copySourceHeader.split("/", 2);
    if (path.length != 2) {
        throw new S3Exception(S3ErrorCode.INVALID_REQUEST);
    }
    String sourceContainerName = path[0];
    String sourceBlobName = path[1];

    GetOptions options = new GetOptions();
    String range = request.getHeader("x-amz-copy-source-range");
    if (range != null && range.startsWith("bytes=") &&
            // ignore multiple ranges
            range.indexOf(',') == -1) {
        range = range.substring("bytes=".length());
        String[] ranges = range.split("-", 2);
        if (ranges[0].isEmpty()) {
            options.tail(Long.parseLong(ranges[1]));
        } else if (ranges[1].isEmpty()) {
            options.startAt(Long.parseLong(ranges[0]));
        } else {
            options.range(Long.parseLong(ranges[0]), Long.parseLong(ranges[1]));
        }
    }

    String partNumberString = request.getParameter("partNumber");
    if (partNumberString == null) {
        throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT);
    }
    int partNumber;
    try {
        partNumber = Integer.parseInt(partNumberString);
    } catch (NumberFormatException nfe) {
        throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT,
                "Part number must be an integer between 1 and 10000, inclusive", nfe,
                ImmutableMap.of("ArgumentName", "partNumber", "ArgumentValue", partNumberString));
    }
    if (partNumber < 1 || partNumber > 10_000) {
        throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT,
                "Part number must be an integer between 1 and 10000, inclusive", (Throwable) null,
                ImmutableMap.of("ArgumentName", "partNumber", "ArgumentValue", partNumberString));
    }

    // TODO: how to reconstruct original mpu?
    MultipartUpload mpu = MultipartUpload.create(containerName, blobName, uploadId,
            createFakeBlobMetadata(blobStore), new PutOptions());

    Blob blob = blobStore.getBlob(sourceContainerName, sourceBlobName, options);
    if (blob == null) {
        throw new S3Exception(S3ErrorCode.NO_SUCH_KEY);
    }

    BlobMetadata blobMetadata = blob.getMetadata();

    // Note: the original listing had the if-none-match and if-modified-since
    // header names swapped; corrected here.
    String ifMatch = request.getHeader("x-amz-copy-source-if-match");
    String ifNoneMatch = request.getHeader("x-amz-copy-source-if-none-match");
    long ifModifiedSince = request.getDateHeader("x-amz-copy-source-if-modified-since");
    long ifUnmodifiedSince = request.getDateHeader("x-amz-copy-source-if-unmodified-since");
    String eTag = blobMetadata.getETag();
    if (eTag != null) {
        eTag = maybeQuoteETag(eTag);
        if (ifMatch != null && !ifMatch.equals(eTag)) {
            throw new S3Exception(S3ErrorCode.PRECONDITION_FAILED);
        }
        if (ifNoneMatch != null && ifNoneMatch.equals(eTag)) {
            throw new S3Exception(S3ErrorCode.PRECONDITION_FAILED);
        }
    }

    Date lastModified = blobMetadata.getLastModified();
    if (lastModified != null) {
        if (ifModifiedSince != -1 && lastModified.compareTo(new Date(ifModifiedSince)) <= 0) {
            throw new S3Exception(S3ErrorCode.PRECONDITION_FAILED);
        }
        if (ifUnmodifiedSince != -1 && lastModified.compareTo(new Date(ifUnmodifiedSince)) >= 0) {
            throw new S3Exception(S3ErrorCode.PRECONDITION_FAILED);
        }
    }

    long contentLength = blobMetadata.getContentMetadata().getContentLength();
    String blobStoreType = getBlobStoreType(blobStore);

    try (InputStream is = blob.getPayload().openStream()) {
        if (blobStoreType.equals("azureblob")) {
            // Azure has a maximum part size of 4 MB while S3 has a minimum
            // part size of 5 MB and a maximum of 5 GB. Split a single S3
            // part into multiple Azure parts.
            long azureMaximumMultipartPartSize = blobStore.getMaximumMultipartPartSize();
            HashingInputStream his = new HashingInputStream(Hashing.md5(), is);
            for (int offset = 0, subPartNumber = 0; offset < contentLength;
                    offset += azureMaximumMultipartPartSize, ++subPartNumber) {
                Payload payload = Payloads.newInputStreamPayload(new UncloseableInputStream(
                        ByteStreams.limit(his, azureMaximumMultipartPartSize)));
                payload.getContentMetadata().setContentLength(
                        Math.min(azureMaximumMultipartPartSize, contentLength - offset));
                blobStore.uploadMultipartPart(mpu, 10_000 * partNumber + subPartNumber, payload);
            }
            eTag = BaseEncoding.base16().lowerCase().encode(his.hash().asBytes());
        } else {
            Payload payload = Payloads.newInputStreamPayload(is);
            payload.getContentMetadata().setContentLength(contentLength);
            MultipartPart part = blobStore.uploadMultipartPart(mpu, partNumber, payload);
            eTag = part.partETag();
        }
    }

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("CopyObjectResult");
        xml.writeDefaultNamespace(AWS_XMLNS);

        writeSimpleElement(xml, "LastModified", formatDate(lastModified));
        if (eTag != null) {
            writeSimpleElement(xml, "ETag", maybeQuoteETag(eTag));
        }

        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}
From source file:org.graphipedia.dataextract.ExtractData.java
@Override
public void run() {
    logger.info("Start extracting data...");
    long startTime = System.currentTimeMillis();
    DisambiguationPageExtractor dpExtractor = new DisambiguationPageExtractor(settings,
            this.language, dpRootCategory, checkpoint, loggerMessageSuffix);
    dpExtractor.start();
    ExtractNamespaces nsExtractor = new ExtractNamespaces(settings, language, loggerMessageSuffix,
            new File(settings.wikipediaEditionDirectory(language), ExtractNamespaces.NAMESPACE_FILE),
            checkpoint);
    nsExtractor.start();
    InfoboxTemplatesExtractor itExtractor = new InfoboxTemplatesExtractor(settings, language,
            itRootCategory, checkpoint, loggerMessageSuffix);
    itExtractor.start();
    ExtractGeoTags geotagsExtractor = new ExtractGeoTags(settings, language, loggerMessageSuffix);
    geotagsExtractor.start();
    try {
        dpExtractor.join();
        nsExtractor.join();
        this.ns = nsExtractor.namespaces();
        itExtractor.join();
        geotagsExtractor.join();
        this.geotags = geotagsExtractor.getGeoTags();
    } catch (InterruptedException e) {
        logger.severe("Problems with the threads.");
        e.printStackTrace();
        System.exit(-1);
    }
    XMLOutputFactory outputFactory = XMLOutputFactory2.newInstance();
    File outputFile = new File(settings.wikipediaEditionDirectory(language), TEMPORARY_LINK_FILE);
    if (checkpoint.isLinksExtracted(this.language)) {
        logger.info("Using pages and links from a previous computation");
        return;
    }
    try {
        FileOutputStream fout = new FileOutputStream(outputFile.getAbsolutePath());
        BufferedOutputStream bos = new BufferedOutputStream(fout);
        CompressorOutputStream output = new CompressorStreamFactory()
                .createCompressorOutputStream(CompressorStreamFactory.BZIP2, bos);
        XMLStreamWriter writer = outputFactory.createXMLStreamWriter(output, "UTF-8");
        writer.writeStartDocument();
        writer.writeStartElement("d");
        LinkExtractor linkExtractor = new LinkExtractor(writer, logger, settings, language,
                dpExtractor.disambiguationPages(), itExtractor.infoboxTemplates(), this.ns);
        linkExtractor.parse(settings.getWikipediaXmlFile(language).getAbsolutePath());
        writer.writeEndElement();
        writer.writeEndDocument();
        // Close the XML writer before the streams underneath it so no
        // buffered output is lost (the original listing closed them in
        // the reverse order).
        writer.close();
        output.close();
        bos.close();
        fout.close();
        long elapsed = System.currentTimeMillis() - startTime;
        logger.info("Data extracted in " + ReadableTime.readableTime(elapsed));
    } catch (Exception e) {
        logger.severe("Error while parsing the XML file");
        e.printStackTrace();
        System.exit(-1);
    }
    try {
        checkpoint.addLinksExtracted(this.language, true);
    } catch (IOException e) {
        logger.severe("Error while saving the checkpoint to file");
        e.printStackTrace();
        System.exit(-1);
    }
    settings.getWikipediaXmlFile(language).delete();
}
From source file:org.mule.modules.jive.api.xml.XmlMapper.java
/**
 * @param xmlRootTag The root XML tag to use in the mapping
 * @param entity The {@link Map} to be converted into XML
 * @param writer The {@link Writer} to write the result to
 */
public final void map2xml(final String xmlRootTag, final Map<String, Object> entity,
        final Writer writer) {
    try {
        final XMLStreamWriter w = xmlOutputFactory.createXMLStreamWriter(writer);
        w.writeStartDocument();
        w.writeStartElement(xmlRootTag);
        writeXML(w, entity);
        w.writeEndElement();
        w.writeEndDocument();
    } catch (XMLStreamException e) {
        throw new UnhandledException(e);
    }
}
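For reference, a call to this mapper might look like the following sketch. The mapper instance and the exact output are assumptions for illustration; writeXML is this class's private helper that serializes the map's entries.

// Hypothetical usage (requires java.io.StringWriter, java.util.LinkedHashMap, java.util.Map):
Map<String, Object> user = new LinkedHashMap<>();
user.put("name", "jdoe");
user.put("email", "jdoe@example.com");

StringWriter out = new StringWriter();
mapper.map2xml("user", user, out);
// out now holds something like:
// <?xml version="1.0" ?><user><name>jdoe</name><email>jdoe@example.com</email></user>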
From source file:org.netbeans.jbatch.modeler.spec.core.Definitions.java
public static void unload(ModelerFile file, List<String> definitionIdList) {
    File savedFile = file.getFile();
    if (definitionIdList.isEmpty()) {
        return;
    }
    try {
        File cloneSavedFile = File.createTempFile("TMP", "job");
        FileUtils.copyFile(savedFile, cloneSavedFile);

        BufferedReader br = new BufferedReader(new FileReader(savedFile));
        String line = null;
        while ((line = br.readLine()) != null) {
            System.out.println("pre savedFile : " + line);
        }
        br.close();

        XMLOutputFactory xof = XMLOutputFactory.newFactory();
        XMLStreamWriter xsw = xof.createXMLStreamWriter(new FileWriter(savedFile));
        xsw.setDefaultNamespace("http://jbatchsuite.java.net");
        xsw.writeStartDocument();
        xsw.writeStartElement("jbatchnb", "root", "http://jbatchsuite.java.net");
        xsw.writeNamespace("jbatch", "http://xmlns.jcp.org/xml/ns/javaee");
        xsw.writeNamespace("jbatchnb", "http://jbatchsuite.java.net");
        xsw.writeNamespace("xsi", "http://www.w3.org/2001/XMLSchema-instance");
        xsw.writeNamespace("java", "http://jcp.org/en/jsr/detail?id=270");
        xsw.writeNamespace("nbm", "http://nbmodeler.java.net");

        if (cloneSavedFile.length() != 0) {
            try {
                XMLInputFactory xif = XMLInputFactory.newFactory();
                StreamSource xml = new StreamSource(cloneSavedFile);
                XMLStreamReader xsr = xif.createXMLStreamReader(xml);
                xsr.nextTag();
                while (xsr.getEventType() == XMLStreamConstants.START_ELEMENT) {
                    // Copy (transform) each definitions element from the old file
                    // into the new one unless its id appears in definitionIdList;
                    // skip those so they are removed from the saved file.
                    if (xsr.getLocalName().equals("definitions")) {
                        if (xsr.getAttributeValue(null, "id") == null) {
                            System.out.println("transformXMLStream " + null);
                            transformXMLStream(xsr, xsw);
                        } else {
                            if (!definitionIdList.contains(xsr.getAttributeValue(null, "id"))) {
                                System.out.println("transformXMLStream "
                                        + xsr.getAttributeValue(null, "id"));
                                transformXMLStream(xsr, xsw);
                            } else {
                                System.out.println("skipXMLStream "
                                        + xsr.getAttributeValue(null, "id"));
                                skipXMLStream(xsr);
                            }
                        }
                    }
                    System.out.println("pre xsr.getEventType() : " + xsr.getEventType()
                            + " " + xsr.getLocalName());
                    xsr.nextTag();
                    System.out.println("post xsr.getEventType() : " + xsr.getEventType()
                            + " " + xsr.getLocalName());
                }
            } catch (XMLStreamException ex) {
                Exceptions.printStackTrace(ex);
            }
        }
        xsw.writeEndDocument();
        xsw.close();

        br = new BufferedReader(new FileReader(savedFile));
        line = null;
        while ((line = br.readLine()) != null) {
            System.out.println("post savedFile : " + line);
        }
        br.close();
    } catch (IOException ex) {
        Exceptions.printStackTrace(ex);
    } catch (XMLStreamException ex) {
        Exceptions.printStackTrace(ex);
    }
}
From source file:org.netbeans.jbatch.modeler.specification.model.job.util.JobUtil.java
public void saveModelerFile(ModelerFile modelerFile) {
    Definitions definitions = (Definitions) modelerFile.getDefinitionElement();
    try {
        updateBatchDiagram(modelerFile);
        List<String> closeDefinitionIdList = closeDiagram(modelerFile,
                definitions.getGarbageDefinitions());
        List<String> definitionIdList = new ArrayList<String>(closeDefinitionIdList);
        definitionIdList.add(definitions.getId());

        File savedFile = modelerFile.getFile();
        BufferedReader br = new BufferedReader(new FileReader(savedFile));
        String line = null;
        while ((line = br.readLine()) != null) {
            System.out.println("savedFile : " + line);
        }
        br.close();

        File cloneSavedFile = File.createTempFile("TMP", "job");
        FileUtils.copyFile(savedFile, cloneSavedFile);

        XMLOutputFactory xof = XMLOutputFactory.newFactory();
        XMLStreamWriter xsw = xof.createXMLStreamWriter(new FileWriter(savedFile));
        xsw.setDefaultNamespace("http://jbatchsuite.java.net");
        xsw.writeStartDocument();
        xsw.writeStartElement("jbatchnb", "root", "http://jbatchsuite.java.net");
        xsw.writeNamespace("jbatch", "http://xmlns.jcp.org/xml/ns/javaee");
        xsw.writeNamespace("jbatchnb", "http://jbatchsuite.java.net");
        xsw.writeNamespace("xsi", "http://www.w3.org/2001/XMLSchema-instance");
        xsw.writeNamespace("java", "http://jcp.org/en/jsr/detail?id=270");
        xsw.writeNamespace("nbm", "http://nbmodeler.java.net");

        if (cloneSavedFile.length() != 0) {
            try {
                XMLInputFactory xif = XMLInputFactory.newFactory();
                StreamSource xml = new StreamSource(cloneSavedFile);
                XMLStreamReader xsr = xif.createXMLStreamReader(xml);
                xsr.nextTag();
                while (xsr.getEventType() == XMLStreamConstants.START_ELEMENT) {
                    // Copy (transform) definitions elements from the previous file
                    // unless their id belongs to definitionIdList; those are skipped
                    // because they are rewritten below from the in-memory model.
                    if (xsr.getLocalName().equals("definitions")) {
                        if (xsr.getAttributeValue(null, "id") == null) {
                            if (definitions.getId() == null) {
                                skipXMLStream(xsr);
                            } else {
                                transformXMLStream(xsr, xsw);
                            }
                        } else {
                            if (!definitionIdList.contains(xsr.getAttributeValue(null, "id"))) {
                                transformXMLStream(xsr, xsw);
                            } else {
                                skipXMLStream(xsr);
                            }
                        }
                    }
                    xsr.nextTag();
                }
            } catch (XMLStreamException ex) {
                Exceptions.printStackTrace(ex);
            }
        }

        JAXBElement<Definitions> je = new JAXBElement<Definitions>(
                new QName("http://jbatchsuite.java.net", "definitions", "jbatchnb"),
                Definitions.class, definitions);
        if (jobContext == null) {
            jobContext = JAXBContext.newInstance(new Class<?>[] { ShapeDesign.class,
                    Definitions.class });
        }
        if (jobMarshaller == null) {
            jobMarshaller = jobContext.createMarshaller();
        }
        // output pretty printed
        jobMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
        jobMarshaller.setProperty(Marshaller.JAXB_FRAGMENT, Boolean.TRUE);
        // jobMarshaller.setProperty(Marshaller.JAXB_SCHEMA_LOCATION,
        //         "http://www.omg.org/spec/Batch/20100524/MODEL http://www.omg.org/spec/Batch/2.0/20100501/Batch20.xsd");
        jobMarshaller.setEventHandler(new ValidateJAXB());
        jobMarshaller.marshal(je, System.out);
        jobMarshaller.marshal(je, xsw);

        xsw.writeEndDocument();
        xsw.close();
    } catch (JAXBException ex) {
        Exceptions.printStackTrace(ex);
    } catch (IOException ex) {
        Exceptions.printStackTrace(ex);
    } catch (XMLStreamException ex) {
        Exceptions.printStackTrace(ex);
    }
}
From source file:org.osaf.cosmo.dav.impl.StandardDavResponse.java
public void sendDavError(DavException e) throws IOException {
    setStatus(e.getErrorCode());
    if (!e.hasContent())
        return;

    XMLStreamWriter writer = null;
    try {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        writer = XML_OUTPUT_FACTORY.createXMLStreamWriter(out);
        writer.writeStartDocument();
        e.writeTo(writer);
        writer.writeEndDocument();

        setContentType("text/xml; charset=UTF-8");
        byte[] bytes = out.toByteArray();
        setContentLength(bytes.length);
        getOutputStream().write(bytes);
    } catch (Throwable e2) {
        log.error("Error writing XML", e2);
        log.error("Original exception", e);
        setStatus(500);
    } finally {
        if (writer != null) {
            try {
                writer.close();
            } catch (XMLStreamException e2) {
                log.warn("Unable to close XML writer", e2);
            }
        }
    }
}
From source file:org.osaf.cosmo.mc.CollectionService.java
public void writeTo(OutputStream out) throws IOException, XMLStreamException {
    XMLStreamWriter writer = XML_OUTPUT_FACTORY.createXMLStreamWriter(out);
    writer.setPrefix(PRE_XML, NS_XML);
    writer.setDefaultNamespace(NS_MC);
    try {
        writer.writeStartDocument();
        writer.writeStartElement(EL_MC_SERVICE);
        writer.writeDefaultNamespace(NS_MC);
        writer.writeAttribute(NS_XML, EL_XML_BASE, locator.getMorseCodeBase());
        for (CollectionItem collection : collections) {
            writer.writeStartElement(EL_MC_COLLECTION);
            writer.writeAttribute(ATTR_MC_UUID, collection.getUid());
            writer.writeAttribute(ATTR_MC_HREF, href(collection));
            writer.writeStartElement(EL_MC_NAME);
            writer.writeCharacters(collection.getDisplayName());
            writer.writeEndElement();
            for (Ticket ticket : visibleTickets(collection)) {
                writer.writeStartElement(EL_MC_TICKET);
                writer.writeAttribute(ATTR_MC_TYPE, ticket.getType().toString());
                writer.writeCharacters(ticket.getKey());
                writer.writeEndElement();
            }
            writer.writeEndElement();
        }
        writer.writeEndElement();
        writer.writeEndDocument();
    } finally {
        writer.close();
    }
}