Usage examples for java.io.ByteArrayOutputStream.size()
public synchronized int size()
From source file:org.elasticsearch.client.RequestConverters.java
/**
 * Converts a {@link BulkRequest} into a low-level REST {@link Request} targeting the
 * {@code /_bulk} endpoint.
 *
 * <p>Builds the newline-delimited body expected by the Bulk API: for every action, one
 * metadata object (op type, index/type/id, routing, versioning, pipeline, retry/fetch-source
 * settings) followed — for index/create/update actions — by the document source, each chunk
 * terminated by the content-type's stream separator byte.
 *
 * @param bulkRequest the bulk request to serialise; its actions must share one content type
 * @return the ready-to-send low-level request with body and content-type entity set
 * @throws IOException if writing any action into the body buffer fails
 */
static Request bulk(BulkRequest bulkRequest) throws IOException {
    Request request = new Request(HttpPost.METHOD_NAME, "/_bulk");
    Params parameters = new Params(request);
    parameters.withTimeout(bulkRequest.timeout());
    parameters.withRefreshPolicy(bulkRequest.getRefreshPolicy());
    // Bulk API only supports newline delimited JSON or Smile. Before executing
    // the bulk, we need to check that all requests have the same content-type
    // and this content-type is supported by the Bulk API.
    XContentType bulkContentType = null;
    for (int i = 0; i < bulkRequest.numberOfActions(); i++) {
        DocWriteRequest<?> action = bulkRequest.requests().get(i);
        DocWriteRequest.OpType opType = action.opType();
        if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) {
            bulkContentType = enforceSameContentType((IndexRequest) action, bulkContentType);
        } else if (opType == DocWriteRequest.OpType.UPDATE) {
            // Updates can carry both a partial doc and an upsert doc; both must agree.
            UpdateRequest updateRequest = (UpdateRequest) action;
            if (updateRequest.doc() != null) {
                bulkContentType = enforceSameContentType(updateRequest.doc(), bulkContentType);
            }
            if (updateRequest.upsertRequest() != null) {
                bulkContentType = enforceSameContentType(updateRequest.upsertRequest(), bulkContentType);
            }
        }
    }
    // No typed action supplied a content type (e.g. delete-only bulk): default to JSON.
    if (bulkContentType == null) {
        bulkContentType = XContentType.JSON;
    }
    final byte separator = bulkContentType.xContent().streamSeparator();
    final ContentType requestContentType = createContentType(bulkContentType);
    ByteArrayOutputStream content = new ByteArrayOutputStream();
    for (DocWriteRequest<?> action : bulkRequest.requests()) {
        DocWriteRequest.OpType opType = action.opType();
        // First line per action: the metadata object, keyed by the lowercase op type.
        try (XContentBuilder metadata = XContentBuilder.builder(bulkContentType.xContent())) {
            metadata.startObject();
            {
                metadata.startObject(opType.getLowercase());
                if (Strings.hasLength(action.index())) {
                    metadata.field("_index", action.index());
                }
                if (Strings.hasLength(action.type())) {
                    metadata.field("_type", action.type());
                }
                if (Strings.hasLength(action.id())) {
                    metadata.field("_id", action.id());
                }
                if (Strings.hasLength(action.routing())) {
                    metadata.field("routing", action.routing());
                }
                if (action.version() != Versions.MATCH_ANY) {
                    metadata.field("version", action.version());
                }
                // INTERNAL is the default and is therefore omitted from the metadata line.
                VersionType versionType = action.versionType();
                if (versionType != VersionType.INTERNAL) {
                    if (versionType == VersionType.EXTERNAL) {
                        metadata.field("version_type", "external");
                    } else if (versionType == VersionType.EXTERNAL_GTE) {
                        metadata.field("version_type", "external_gte");
                    } else if (versionType == VersionType.FORCE) {
                        metadata.field("version_type", "force");
                    }
                }
                if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) {
                    IndexRequest indexRequest = (IndexRequest) action;
                    if (Strings.hasLength(indexRequest.getPipeline())) {
                        metadata.field("pipeline", indexRequest.getPipeline());
                    }
                } else if (opType == DocWriteRequest.OpType.UPDATE) {
                    UpdateRequest updateRequest = (UpdateRequest) action;
                    if (updateRequest.retryOnConflict() > 0) {
                        metadata.field("retry_on_conflict", updateRequest.retryOnConflict());
                    }
                    if (updateRequest.fetchSource() != null) {
                        metadata.field("_source", updateRequest.fetchSource());
                    }
                }
                metadata.endObject();
            }
            metadata.endObject();
            BytesRef metadataSource = BytesReference.bytes(metadata).toBytesRef();
            content.write(metadataSource.bytes, metadataSource.offset, metadataSource.length);
            content.write(separator);
        }
        // Second line per action (when present): the document source, re-encoded into the
        // bulk content type if the individual request used a different one.
        BytesRef source = null;
        if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) {
            IndexRequest indexRequest = (IndexRequest) action;
            BytesReference indexSource = indexRequest.source();
            XContentType indexXContentType = indexRequest.getContentType();
            try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
                    LoggingDeprecationHandler.INSTANCE, indexSource, indexXContentType)) {
                try (XContentBuilder builder = XContentBuilder.builder(bulkContentType.xContent())) {
                    builder.copyCurrentStructure(parser);
                    source = BytesReference.bytes(builder).toBytesRef();
                }
            }
        } else if (opType == DocWriteRequest.OpType.UPDATE) {
            source = XContentHelper.toXContent((UpdateRequest) action, bulkContentType, false).toBytesRef();
        }
        if (source != null) {
            content.write(source.bytes, source.offset, source.length);
            content.write(separator);
        }
    }
    request.setEntity(new ByteArrayEntity(content.toByteArray(), 0, content.size(), requestContentType));
    return request;
}
From source file:br.gov.jfrj.siga.ex.bl.ExBL.java
/**
 * Returns the contents of a {@code Map<String, String>} as a byte array formatted as an
 * {@code application/x-www-form-urlencoded} body, using ISO-8859-1 as the charset.
 *
 * <p>Both keys and values are URL-encoded (the previous version wrote keys raw, producing
 * malformed form data for keys containing reserved characters), and all byte conversions use
 * ISO-8859-1 explicitly instead of the platform default charset.
 *
 * @param map the key/value pairs to encode; iteration order of the map determines field order
 * @return the URL-encoded form body as bytes
 * @throws IOException if writing to the in-memory buffer fails
 * @throws UnsupportedEncodingException if ISO-8859-1 is unavailable (never on a conforming JVM)
 */
public static byte[] urlEncodedFormFromMap(Map<String, String> map)
        throws IOException, UnsupportedEncodingException {
    final String charset = "iso-8859-1";
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    for (Map.Entry<String, String> entry : map.entrySet()) {
        // Separate fields with '&' after the first one.
        if (baos.size() != 0)
            baos.write('&');
        baos.write(URLEncoder.encode(entry.getKey(), charset).getBytes(charset));
        baos.write('=');
        // URLEncoder output is pure ASCII, but pin the charset anyway for determinism.
        baos.write(URLEncoder.encode(entry.getValue(), charset).getBytes(charset));
    }
    return baos.toByteArray();
}
From source file:org.apache.tika.parser.chm.core.ChmExtractor.java
/** * Decompresses a chm entry//ww w. java 2 s . c o m * * @param directoryListingEntry * * @return decompressed data * @throws TikaException */ public byte[] extractChmEntry(DirectoryListingEntry directoryListingEntry) throws TikaException { ByteArrayOutputStream buffer = new ByteArrayOutputStream(); ChmLzxBlock lzxBlock = null; try { /* UNCOMPRESSED type is easiest one */ if (directoryListingEntry.getEntryType() == EntryType.UNCOMPRESSED && directoryListingEntry.getLength() > 0 && !ChmCommons.hasSkip(directoryListingEntry)) { int dataOffset = (int) (getChmItsfHeader().getDataOffset() + directoryListingEntry.getOffset()); // dataSegment = Arrays.copyOfRange(getData(), dataOffset, // dataOffset + directoryListingEntry.getLength()); buffer.write(ChmCommons.copyOfRange(getData(), dataOffset, dataOffset + directoryListingEntry.getLength())); } else if (directoryListingEntry.getEntryType() == EntryType.COMPRESSED && !ChmCommons.hasSkip(directoryListingEntry)) { /* Gets a chm hit_cache info */ ChmBlockInfo bb = ChmBlockInfo.getChmBlockInfoInstance(directoryListingEntry, (int) getChmLzxcResetTable().getBlockLen(), getChmLzxcControlData()); int i = 0, start = 0, hit_cache = 0; if ((getLzxBlockLength() < Integer.MAX_VALUE) && (getLzxBlockOffset() < Integer.MAX_VALUE)) { // TODO: Improve the caching // caching ... 
= O(n^2) - depends on startBlock and endBlock start = -1; if (!getLzxBlocksCache().isEmpty()) { for (i = 0; i < getLzxBlocksCache().size(); i++) { //lzxBlock = getLzxBlocksCache().get(i); int bn = getLzxBlocksCache().get(i).getBlockNumber(); for (int j = bb.getIniBlock(); j <= bb.getStartBlock(); j++) { if (bn == j) { if (j > start) { start = j; hit_cache = i; } } } if (start == bb.getStartBlock()) break; } } // if (i == getLzxBlocksCache().size() && i == 0) { if (start < 0) { start = bb.getIniBlock(); byte[] dataSegment = ChmCommons.getChmBlockSegment(getData(), getChmLzxcResetTable(), start, (int) getLzxBlockOffset(), (int) getLzxBlockLength()); lzxBlock = new ChmLzxBlock(start, dataSegment, getChmLzxcResetTable().getBlockLen(), null); getLzxBlocksCache().add(lzxBlock); } else { lzxBlock = getLzxBlocksCache().get(hit_cache); } for (i = start; i <= bb.getEndBlock();) { if (i == bb.getStartBlock() && i == bb.getEndBlock()) { buffer.write(lzxBlock.getContent(bb.getStartOffset(), bb.getEndOffset())); break; } if (i == bb.getStartBlock()) { buffer.write(lzxBlock.getContent(bb.getStartOffset())); } if (i > bb.getStartBlock() && i < bb.getEndBlock()) { buffer.write(lzxBlock.getContent()); } if (i == bb.getEndBlock()) { buffer.write(lzxBlock.getContent(0, bb.getEndOffset())); break; } i++; if (i % getChmLzxcControlData().getResetInterval() == 0) { lzxBlock = new ChmLzxBlock(i, ChmCommons.getChmBlockSegment(getData(), getChmLzxcResetTable(), i, (int) getLzxBlockOffset(), (int) getLzxBlockLength()), getChmLzxcResetTable().getBlockLen(), null); } else { lzxBlock = new ChmLzxBlock(i, ChmCommons.getChmBlockSegment(getData(), getChmLzxcResetTable(), i, (int) getLzxBlockOffset(), (int) getLzxBlockLength()), getChmLzxcResetTable().getBlockLen(), lzxBlock); } getLzxBlocksCache().add(lzxBlock); } if (getLzxBlocksCache().size() > getChmLzxcResetTable().getBlockCount()) { getLzxBlocksCache().clear(); } } //end of if if (buffer.size() != directoryListingEntry.getLength()) { throw new 
TikaException("CHM file extract error: extracted Length is wrong."); } } //end of if compressed } catch (Exception e) { throw new TikaException(e.getMessage()); } return buffer.toByteArray(); }
From source file:com.comcast.oscar.tlv.TlvDisassemble.java
/**
 * Builds a JSON array of TLV dictionary entries for every top-level TLV in the active
 * buffer, with each entry's values loaded from the corresponding TLV bytes.
 *
 * <p>Exactly one of the two buffers is used: {@code tbTlvBuffer} (fixed-length TLVs, checked
 * first) or {@code tvbTlvBuffer} (variable-byte-length TLVs). Types with no dictionary entry
 * are skipped. Errors during fetch/load are logged via printStackTrace and the loop continues.
 *
 * @return JSONArray of populated TLV dictionary objects (possibly empty)
 */
public JSONArray getTlvDictionary() {
    boolean localDebug = Boolean.FALSE;
    //Store the entire return
    JSONArray jaTlvDictionary = new JSONArray();
    //Store JSON Object temp for TLV Value insertion
    JSONObject joTlvDictionary;
    //ByteArrayOutputStream Temp
    ByteArrayOutputStream boasTlvBufferTemp = null;
    List<Integer> liTlvBuffer = null;
    //Check which Type of TLV Buffer we are using
    if (tbTlvBuffer != null) {
        if (debug | localDebug)
            System.out.println("TlvDisassemble.getTlvDictionary() - Using TlvBuilder Class");
        liTlvBuffer = tbTlvBuffer.getTopLevelTlvList(getTopLevelByteLength());
    } else if (tvbTlvBuffer != null) {
        if (debug | localDebug)
            System.out.println("TlvDisassemble.getTlvDictionary() - Using TlvVariableBinding Class");
        liTlvBuffer = tvbTlvBuffer.getTopLevelTlvList();
    }
    if (debug | localDebug)
        System.out.println("TlvDisassemble.getTlvDictionary() - TLV-LIST: " + liTlvBuffer);
    //Get Major TLV from tbTlvBuffer via Map Type -> Byte Length
    for (Integer iType : liTlvBuffer) {
        //mgarcia - Added 140103 - Need a new Buffer for each Type Look up
        boasTlvBufferTemp = new ByteArrayOutputStream();
        if (debug | localDebug)
            System.out.println("TlvDisassemble.getTlvDictionary() MAJOR-TLV-TYPE: " + iType);
        //Get TLV Dictionary via TLV Type
        joTlvDictionary = super.getTlvDictionary(iType);
        if (debug | localDebug)
            System.out.println(
                    "TlvDisassemble.getTlvDictionary.joTlvDictionary(" + iType + "): " + joTlvDictionary);
        // Unknown type: no dictionary entry, skip this TLV entirely.
        if (joTlvDictionary == null) {
            if (debug | localDebug)
                System.out.println("TlvDisassemble.getTlvDictionary() -> INVALID-TLV: " + iType);
            continue;
        }
        //Create Map to hold TYPE to Byte Length
        Map<Integer, Integer> miiTypeByteLength = new HashMap<Integer, Integer>();
        //need to account for VariableNumByteLength TLV's
        if (tvbTlvBuffer != null) {
            miiTypeByteLength.put(iType, tvbTlvBuffer.getMapTypeToByteLength().get(iType));
            //fetch Major TLV and store in BASO
            try {
                boasTlvBufferTemp.write(TlvBuilder.fetchTlv(new ArrayList<Integer>(Arrays.asList(iType)), //Contains Type
                        miiTypeByteLength, //Contains Type -> ByteLength
                        tvbTlvBuffer.toByteArray())); //Contains TLV ByteArray
            } catch (IOException e) {
                e.printStackTrace();
            }
            try {
                //Update ByteLength for this TLV
                try {
                    joTlvDictionary.put(Dictionary.BYTE_LENGTH, miiTypeByteLength.get(iType));
                } catch (JSONException e) {
                    e.printStackTrace();
                }
                //Load JSON Object
                // NOTE(review): this branch loads values from the FULL buffer
                // (tvbTlvBuffer.toByteArray()) rather than the fetched boasTlvBufferTemp,
                // unlike the tbTlvBuffer branch below — confirm this asymmetry is intended.
                jaTlvDictionary
                        .put(loadTlvValuesIntoTlvDictionary(tvbTlvBuffer.toByteArray(), joTlvDictionary));
            } catch (TlvException e) {
                e.printStackTrace();
            }
            if (debug | localDebug) {
                System.out.println("TlvDisassemble.getTlvDictionary() VAR-BIND-TLV-BUFFER: " + tvbTlvBuffer);
                System.out.println("TlvDisassemble.getTlvDictionary() joTlvDictionary: " + joTlvDictionary);
            }
        } else if (tbTlvBuffer != null) {
            //Map to hold TYPE to Byte Length
            try {
                miiTypeByteLength.put(joTlvDictionary.getInt(Dictionary.TYPE),
                        joTlvDictionary.getInt(Dictionary.BYTE_LENGTH));
            } catch (JSONException e1) {
                e1.printStackTrace();
            }
            if (debug | localDebug) {
                System.out.println("TlvDisassemble.getTlvDictionary() MapTypeToByteLen: " + miiTypeByteLength);
            }
            //fetch Major TLV and store in BASO
            try {
                boasTlvBufferTemp.write(TlvBuilder.fetchTlv(new ArrayList<Integer>(Arrays.asList(iType)), //Contains Type
                        miiTypeByteLength, //Contains Type -> ByteLength
                        tbTlvBuffer.toByteArray())); //Contains TLV ByteArray
            } catch (IOException e) {
                e.printStackTrace();
            }
            if (debug | localDebug) {
                System.out.println("TlvDisassemble.getTlvDictionary() - BEFORE-LOAD-TLV - TLV-BUFFER-LENGTH: "
                        + boasTlvBufferTemp.size());
                System.out.println("TlvDisassemble.getTlvDictionary() - BEFORE-LOAD-TLV - TLV-BUFFER: "
                        + new HexString(boasTlvBufferTemp.toByteArray()).toString(":"));
                System.out.println("TlvDisassemble.getTlvDictionary() - BEFORE-LOAD-TLV - joTlvDictionary: "
                        + joTlvDictionary);
            }
            try {
                jaTlvDictionary
                        .put(loadTlvValuesIntoTlvDictionary(boasTlvBufferTemp.toByteArray(), joTlvDictionary));
            } catch (TlvException e) {
                e.printStackTrace();
            }
            if (debug | localDebug) {
                System.out.println("TlvDisassemble.getTlvDictionary() - AFTER-LOAD-TLV - TLV-BUFFER-LENGTH: "
                        + new HexString(boasTlvBufferTemp.toByteArray()).toString(":"));
                System.out.println("TlvDisassemble.getTlvDictionary() - AFTER-LOAD-TLV - joTlvDictionary: "
                        + joTlvDictionary);
            }
        }
    }
    return jaTlvDictionary;
}
From source file:com.portfolio.data.attachment.XSLService.java
/**
 * Aggregates portfolio/node XML, resolves Proxy resources, runs a two-stage XSL
 * transformation (optionally through FOP for PDF/RTF), and either POSTs the result to a
 * resource endpoint ({@code documentid} given) or streams it back as an attachment.
 *
 * Request parameters (query string):
 *   xsl          - path {directory}{file} of the second-stage stylesheet
 *   format       - output MIME type; PDF/RTF route through FOP
 *   parameters   - ';'-separated "name:value" pairs passed to the stylesheet
 *   documentid   - if present, upload the result to /resources/resource/file/{uuid}
 *   portfolioids - ';'-separated portfolio uuids to aggregate
 *   nodeids      - ';'-separated node uuids to aggregate
 */
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
    try {
        // Initialize the data provider connection (fall back to a direct connection
        // when context.xml could not be deployed and no DataSource is available).
        Connection c = null;
        if (ds == null) // Case where we can't deploy context.xml
        {
            c = getConnection();
        } else {
            c = ds.getConnection();
        }
        dataProvider.setConnection(c);
        credential = new Credential(c);
    } catch (Exception e) {
        e.printStackTrace();
    }
    String origin = request.getRequestURL().toString();
    /// Session-derived identity (defaults: anonymous user/group 0).
    int userId = 0;
    int groupId = 0;
    String user = "";
    HttpSession session = request.getSession(true);
    if (session != null) {
        Integer val = (Integer) session.getAttribute("uid");
        if (val != null)
            userId = val;
        val = (Integer) session.getAttribute("gid");
        if (val != null)
            groupId = val;
        user = (String) session.getAttribute("user");
    }
    // (Legacy XML-body parsing of <convert> requests was commented out here;
    // parameters are now read from the query string instead.)
    String xslfile = request.getParameter("xsl");
    String format = request.getParameter("format");
    String parameters = request.getParameter("parameters");
    String documentid = request.getParameter("documentid");
    String portfolios = request.getParameter("portfolioids");
    String[] portfolioid = null;
    if (portfolios != null)
        portfolioid = portfolios.split(";");
    String nodes = request.getParameter("nodeids");
    String[] nodeid = null;
    if (nodes != null)
        nodeid = nodes.split(";");
    System.out.println("PARAMETERS: ");
    System.out.println("xsl: " + xslfile);
    System.out.println("format: " + format);
    System.out.println("user: " + userId);
    System.out.println("portfolioids: " + portfolios);
    System.out.println("nodeids: " + nodes);
    System.out.println("parameters: " + parameters);
    boolean redirectDoc = false;
    if (documentid != null) {
        redirectDoc = true;
        System.out.println("documentid @ " + documentid);
    }
    // PDF and RTF outputs are rendered through FOP; everything else is plain XSLT output.
    boolean usefop = false;
    String ext = "";
    if (MimeConstants.MIME_PDF.equals(format)) {
        usefop = true;
        ext = ".pdf";
    } else if (MimeConstants.MIME_RTF.equals(format)) {
        usefop = true;
        ext = ".rtf";
    }
    StringBuilder aggregate = new StringBuilder();
    try {
        int portcount = 0;
        int nodecount = 0;
        // Aggregate requested portfolios and nodes into one XML string.
        if (portfolioid != null) {
            portcount = portfolioid.length;
            for (int i = 0; i < portfolioid.length; ++i) {
                String p = portfolioid[i];
                String portfolioxml = dataProvider
                        .getPortfolio(new MimeType("text/xml"), p, userId, groupId, "", null, null, 0)
                        .toString();
                aggregate.append(portfolioxml);
            }
        }
        if (nodeid != null) {
            nodecount = nodeid.length;
            for (int i = 0; i < nodeid.length; ++i) {
                String n = nodeid[i];
                String nodexml = dataProvider.getNode(new MimeType("text/xml"), n, true, userId, groupId, "")
                        .toString();
                aggregate.append(nodexml);
            }
        }
        // Wrap the aggregate in a single <root>, stripping any embedded XML declarations
        // and declaring the XHTML entity sets so named entities parse.
        String input = aggregate.toString();
        String pattern = "<\\?xml[^>]*>"; // Purge previous xml declaration
        input = input.replaceAll(pattern, "");
        input = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + "<!DOCTYPE xsl:stylesheet ["
                + "<!ENTITY % lat1 PUBLIC \"-//W3C//ENTITIES Latin 1 for XHTML//EN\" \"" + servletDir
                + "xhtml-lat1.ent\">" + "<!ENTITY % symbol PUBLIC \"-//W3C//ENTITIES Symbols for XHTML//EN\" \""
                + servletDir + "xhtml-symbol.ent\">"
                + "<!ENTITY % special PUBLIC \"-//W3C//ENTITIES Special for XHTML//EN\" \"" + servletDir
                + "xhtml-special.ent\">" + "%lat1;" + "%symbol;" + "%special;" + "]>" + // For the pesky special characters
                "<root>" + input + "</root>";
        /// Resolve Proxy resources: replace each proxy's grandparent container with the
        /// first child of the node the proxy's <code> uuid points at.
        DocumentBuilder documentBuilder;
        DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
        documentBuilder = documentBuilderFactory.newDocumentBuilder();
        InputSource is = new InputSource(new StringReader(input));
        Document doc = documentBuilder.parse(is);
        XPath xPath = XPathFactory.newInstance().newXPath();
        String filterRes = "//asmResource[@xsi_type='Proxy']";
        String filterCode = "./code/text()";
        NodeList nodelist = (NodeList) xPath.compile(filterRes).evaluate(doc, XPathConstants.NODESET);
        XPathExpression codeFilter = xPath.compile(filterCode);
        for (int i = 0; i < nodelist.getLength(); ++i) {
            Node res = nodelist.item(i);
            Node gp = res.getParentNode(); // resource -> context -> container
            Node ggp = gp.getParentNode();
            Node uuid = (Node) codeFilter.evaluate(res, XPathConstants.NODE);
            /// Fetch node we want to replace
            String returnValue = dataProvider
                    .getNode(new MimeType("text/xml"), uuid.getTextContent(), true, userId, groupId, "")
                    .toString();
            Document rep = documentBuilder.parse(new InputSource(new StringReader(returnValue)));
            Element repNode = rep.getDocumentElement();
            Node proxyNode = repNode.getFirstChild();
            proxyNode = doc.importNode(proxyNode, true); // adoptNode have some weird side effect. To be banned
            ggp.insertBefore(proxyNode, gp); // replaceChild doesn't work.
            ggp.removeChild(gp);
        }
        try // Convert XML document back to a string after proxy resolution
        {
            DOMSource domSource = new DOMSource(doc);
            StringWriter writer = new StringWriter();
            StreamResult result = new StreamResult(writer);
            TransformerFactory tf = TransformerFactory.newInstance();
            Transformer transformer = tf.newTransformer();
            transformer.transform(domSource, result);
            writer.flush();
            input = writer.toString();
        } catch (TransformerException ex) {
            ex.printStackTrace();
        }
        // Buffers: stageout holds the first-stage output, out the final rendered result
        // (also used to obtain the content length for the response).
        ByteArrayOutputStream stageout = new ByteArrayOutputStream();
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        //// Setup Transformer (1st stage): fixed html2xml.xsl under the xsl base path.
        String basepath = xslfile.substring(0, xslfile.indexOf(File.separator));
        String firstStage = baseDir + File.separator + basepath + File.separator + "karuta" + File.separator
                + "xsl" + File.separator + "html2xml.xsl";
        System.out.println("FIRST: " + firstStage);
        Source xsltSrc1 = new StreamSource(new File(firstStage));
        Transformer transformer1 = transFactory.newTransformer(xsltSrc1);
        StreamSource stageSource = new StreamSource(new ByteArrayInputStream(input.getBytes()));
        Result stageRes = new StreamResult(stageout);
        transformer1.transform(stageSource, stageRes);
        // Setup Transformer (2nd stage): the caller-supplied stylesheet.
        String secondStage = baseDir + File.separator + xslfile;
        Source xsltSrc2 = new StreamSource(new File(secondStage));
        Transformer transformer2 = transFactory.newTransformer(xsltSrc2);
        // Configure stylesheet parameters from the ';'-separated "name:value" list.
        String[] table = parameters.split(";");
        for (int i = 0; i < table.length; ++i) {
            String line = table[i];
            int var = line.indexOf(":");
            String par = line.substring(0, var);
            String val = line.substring(var + 1);
            transformer2.setParameter(par, val);
        }
        // Setup input for the second stage from the first stage's output.
        StreamSource xmlSource = new StreamSource(new ByteArrayInputStream(stageout.toString().getBytes()));
        Result res = null;
        if (usefop) {
            /// FIXME: Might need to include the entity for html stuff?
            // Pipe the XSL transformation's result through FOP to render PDF/RTF.
            Fop fop = fopFactory.newFop(format, out);
            res = new SAXResult(fop.getDefaultHandler());
            transformer2.transform(xmlSource, res);
        } else {
            res = new StreamResult(out);
            transformer2.transform(xmlSource, res);
        }
        if (redirectDoc) {
            // Upload the generated file to /resources/resource/file/{uuid}, forwarding the
            // caller's session cookie, and relay the upstream status/body to the client.
            String urlTarget = "http://" + server + "/resources/resource/file/" + documentid;
            System.out.println("Redirect @ " + urlTarget);
            HttpClientBuilder clientbuilder = HttpClientBuilder.create();
            CloseableHttpClient client = clientbuilder.build();
            HttpPost post = new HttpPost(urlTarget);
            post.addHeader("referer", origin);
            String sessionid = request.getSession().getId();
            System.out.println("Session: " + sessionid);
            post.addHeader("Cookie", "JSESSIONID=" + sessionid);
            MultipartEntityBuilder builder = MultipartEntityBuilder.create();
            builder.setMode(HttpMultipartMode.BROWSER_COMPATIBLE);
            ByteArrayBody body = new ByteArrayBody(out.toByteArray(), "generated" + ext);
            builder.addPart("uploadfile", body);
            HttpEntity entity = builder.build();
            post.setEntity(entity);
            HttpResponse ret = client.execute(post);
            String stringret = new BasicResponseHandler().handleResponse(ret);
            int code = ret.getStatusLine().getStatusCode();
            response.setStatus(code);
            ServletOutputStream output = response.getOutputStream();
            output.write(stringret.getBytes(), 0, stringret.length());
            output.close();
            client.close();
            // (Legacy HttpURLConnection-based upload code was commented out here.)
        } else {
            // Stream the generated document back to the client as an attachment.
            response.reset();
            response.setHeader("Content-Disposition", "attachment; filename=generated" + ext);
            response.setContentType(format);
            response.setContentLength(out.size());
            response.getOutputStream().write(out.toByteArray());
            response.getOutputStream().flush();
        }
    } catch (Exception e) {
        String message = e.getMessage();
        response.setStatus(500);
        response.getOutputStream().write(message.getBytes());
        response.getOutputStream().close();
        e.printStackTrace();
    } finally {
        dataProvider.disconnect();
    }
}
From source file:com.temenos.interaction.media.odata.xml.atom.AtomXMLProvider.java
/**
 * Writes an Atom (OData) representation of {@link EntityResource} to the output stream.
 *
 * <p>Dispatches on the resource's generic type: OEntity entities/collections go through the
 * OData entry/feed writers, Entity and plain-Object resources go through the Abdera-based
 * entity writers (Object resources are first flattened to properties via a transformer).
 * The whole document is rendered into an in-memory buffer so the Content-Length header can
 * be set before copying to the entity stream.
 *
 * @precondition supplied {@link EntityResource} is non null
 * @precondition {@link EntityResource#getEntity()} returns a valid OEntity, this
 *               provider only supports serialising OEntities
 * @postcondition non null Atom (OData) XML document written to OutputStream
 * @invariant valid OutputStream
 */
@SuppressWarnings("unchecked")
@Override
public void writeTo(RESTResource resource, Class<?> type, Type genericType, Annotation[] annotations,
        MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream)
        throws IOException, WebApplicationException {
    assert resource != null;
    assert uriInfo != null;
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    RESTResource restResource = processLinks((RESTResource) resource);
    Collection<Link> processedLinks = restResource.getLinks();
    if (ResourceTypeHelper.isType(type, genericType, EntityResource.class, OEntity.class)) {
        EntityResource<OEntity> entityResource = (EntityResource<OEntity>) resource;
        OEntity tempEntity = entityResource.getEntity();
        EdmEntitySet entitySet = getEdmEntitySet(entityResource.getEntityName());
        List<OLink> olinks = formOLinks(entityResource);
        //Write entry
        // create OEntity with our EdmEntitySet see issue https://github.com/aphethean/IRIS/issues/20
        OEntity oentity = OEntities.create(entitySet, tempEntity.getEntityKey(), tempEntity.getProperties(),
                null);
        entryWriter.write(uriInfo, new OutputStreamWriter(buffer, UTF_8), Responses.entity(oentity), entitySet,
                olinks);
    } else if (ResourceTypeHelper.isType(type, genericType, EntityResource.class, Entity.class)) {
        EntityResource<Entity> entityResource = (EntityResource<Entity>) resource;
        //Write entry
        Entity entity = entityResource.getEntity();
        String entityName = entityResource.getEntityName();
        // Write Entity object with Abdera implementation
        entityEntryWriter.write(uriInfo, new OutputStreamWriter(buffer, UTF_8), entityName, entity,
                processedLinks, entityResource.getEmbedded());
    } else if (ResourceTypeHelper.isType(type, genericType, EntityResource.class)) {
        EntityResource<Object> entityResource = (EntityResource<Object>) resource;
        //Links and entity properties: flatten the POJO to a property map first.
        Object entity = entityResource.getEntity();
        String entityName = entityResource.getEntityName();
        EntityProperties props = new EntityProperties();
        if (entity != null) {
            Map<String, Object> objProps = (transformer != null ? transformer : new BeanTransformer())
                    .transform(entity);
            if (objProps != null) {
                for (String propName : objProps.keySet()) {
                    props.setProperty(new EntityProperty(propName, objProps.get(propName)));
                }
            }
        }
        entityEntryWriter.write(uriInfo, new OutputStreamWriter(buffer, UTF_8), entityName,
                new Entity(entityName, props), processedLinks, entityResource.getEmbedded());
    } else if (ResourceTypeHelper.isType(type, genericType, CollectionResource.class, OEntity.class)) {
        CollectionResource<OEntity> collectionResource = ((CollectionResource<OEntity>) resource);
        EdmEntitySet entitySet = getEdmEntitySet(collectionResource.getEntityName());
        List<EntityResource<OEntity>> collectionEntities = (List<EntityResource<OEntity>>) collectionResource
                .getEntities();
        List<OEntity> entities = new ArrayList<OEntity>();
        Map<OEntity, Collection<Link>> linkId = new HashMap<OEntity, Collection<Link>>();
        for (EntityResource<OEntity> collectionEntity : collectionEntities) {
            // create OEntity with our EdmEntitySet see issue https://github.com/aphethean/IRIS/issues/20
            OEntity tempEntity = collectionEntity.getEntity();
            List<OLink> olinks = formOLinks(collectionEntity);
            Collection<Link> links = collectionEntity.getLinks();
            OEntity entity = OEntities.create(entitySet, null, tempEntity.getEntityKey(),
                    tempEntity.getEntityTag(), tempEntity.getProperties(), olinks);
            entities.add(entity);
            linkId.put(entity, links);
        }
        // TODO implement collection properties and get transient value for skiptoken
        Integer inlineCount = collectionResource.getInlineCount();
        String skipToken = null;
        feedWriter.write(uriInfo, new OutputStreamWriter(buffer, UTF_8), processedLinks,
                Responses.entities(entities, entitySet, inlineCount, skipToken), metadata.getModelName(),
                linkId);
    } else if (ResourceTypeHelper.isType(type, genericType, CollectionResource.class, Entity.class)) {
        CollectionResource<Entity> collectionResource = ((CollectionResource<Entity>) resource);
        // TODO implement collection properties and get transient value for skiptoken
        Integer inlineCount = collectionResource.getInlineCount();
        String skipToken = null;
        //Write feed
        AtomEntityFeedFormatWriter entityFeedWriter = new AtomEntityFeedFormatWriter(serviceDocument, metadata);
        entityFeedWriter.write(uriInfo, new OutputStreamWriter(buffer, UTF_8), collectionResource, inlineCount,
                skipToken, metadata.getModelName());
    } else {
        LOGGER.error("Accepted object for writing in isWriteable, but type not supported in writeTo method");
        throw new WebApplicationException(Response.Status.INTERNAL_SERVER_ERROR);
    }
    //Set response headers
    if (httpHeaders != null) {
        httpHeaders.putSingle(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_ATOM_XML);
        //Workaround for https://issues.apache.org/jira/browse/WINK-374
        httpHeaders.putSingle(HttpHeaders.CONTENT_LENGTH, Integer.toString(buffer.size()));
    }
    IOUtils.copy(new ByteArrayInputStream(buffer.toByteArray()), entityStream);
}
From source file:ee.sk.digidoc.DataFile.java
/**
 * Calculates the DataFile's size and digest.
 * Since it calculates the digest of the external file
 * then this is only useful for detached files.
 *
 * <p>While digesting, the canonical XML form of the DataFile (header, content, trailer) is
 * also written to {@code os} when it is non-null. Binary content is delegated to
 * {@code calcHashes}; embedded-base64 content is encoded on the fly (optionally in 64-byte
 * lines, controlled by the DATAFILE_USE_64BYTE_LINES config property); other content is
 * converted to UTF-8 and canonicalized before hashing.
 *
 * @param os optional output stream to receive the XML representation; may be null
 * @throws DigiDocException for all errors
 */
public void calculateFileSizeAndDigest(OutputStream os) throws DigiDocException {
    if (m_logger.isDebugEnabled())
        m_logger.debug("calculateFileSizeAndDigest(" + getId() + ") body: "
                + ((m_body != null) ? "OK" : "NULL") + " base64: " + m_bodyIsBase64 + " DF cache: "
                + ((m_fDfCache != null) ? m_fDfCache.getAbsolutePath() : "NULL"));
    FileInputStream fis = null;
    // Binary content: hash whichever source is available (cache file, in-memory body,
    // or the named file) and return — no XML header/trailer handling needed.
    if (m_contentType.equals(CONTENT_BINARY)) {
        InputStream is = null;
        try {
            if (getDfCacheFile() != null)
                is = getBodyAsStream();
            else if (is == null && m_body != null)
                is = new java.io.ByteArrayInputStream(m_body);
            else if (is == null && m_fileName != null)
                is = new java.io.FileInputStream(m_fileName);
            if (is != null)
                calcHashes(is);
        } catch (java.io.FileNotFoundException ex) {
            throw new DigiDocException(DigiDocException.ERR_READ_FILE, "Cannot read file: " + m_fileName,
                    null);
        } finally {
            try {
                if (is != null)
                    is.close();
            } catch (Exception ex) {
                m_logger.error("Error closing stream: " + ex);
            }
        }
        return;
    }
    MessageDigest sha = null;
    boolean bUse64ByteLines = true;
    String use64Flag = ConfigManager.instance().getProperty("DATAFILE_USE_64BYTE_LINES");
    if (use64Flag != null && use64Flag.equalsIgnoreCase("FALSE"))
        bUse64ByteLines = false;
    try {
        sha = MessageDigest.getInstance("SHA-1"); // TODO: fix digest type
        // if DataFile's digest has already been initialized
        // and body in memory, e.g. has been read from digidoc
        // then write directly to output stream and don't calculate again
        if (m_origDigestValue != null && m_body != null && os != null) {
            os.write(xmlHeader());
            if (m_logger.isDebugEnabled())
                m_logger.debug("write df header1: " + xmlHeader());
            os.write(m_body);
            os.write(xmlTrailer());
            return;
        }
        // Temporarily strip the directory part of the file name; restored at the end.
        String longFileName = m_fileName;
        File fIn = new File(m_fileName);
        m_fileName = fIn.getName();
        if (fIn.canRead() && m_fDfCache == null) {
            fis = new FileInputStream(longFileName);
            if (m_logger.isDebugEnabled())
                m_logger.debug("Read file: " + longFileName);
        } else if (m_fDfCache != null) {
            fis = new FileInputStream(m_fDfCache);
            if (m_logger.isDebugEnabled())
                m_logger.debug("Read cache: " + m_fDfCache);
        }
        byte[] tmp1 = null, tmp2 = null, tmp3 = null;
        ByteArrayOutputStream sbDig = new ByteArrayOutputStream();
        sbDig.write(xmlHeader());
        // add trailer and canonicalize
        tmp3 = xmlTrailer();
        sbDig.write(tmp3);
        tmp1 = canonicalizeXml(sbDig.toByteArray());
        // now remove the end tag again and calculate digest of the start tag only
        if (tmp1 != null) {
            tmp2 = new byte[tmp1.length - tmp3.length];
            System.arraycopy(tmp1, 0, tmp2, 0, tmp2.length);
            sha.update(tmp2);
            if (os != null)
                os.write(xmlHeader());
        }
        // reset the collecting buffer and other temp buffers
        sbDig = new ByteArrayOutputStream();
        tmp1 = tmp2 = tmp3 = null;
        // content must be read from file
        if (m_body == null) {
            if (m_logger.isDebugEnabled())
                m_logger.debug("Reading input file: " + ((fIn.canRead() && m_fDfCache == null) ? longFileName
                        : ((m_fDfCache != null) ? m_fDfCache.getAbsolutePath() : "no-cache")));
            byte[] buf = new byte[block_size];
            byte[] b64leftover = null;
            int fRead = 0, b64left = 0;
            ByteArrayOutputStream content = null;
            if (m_contentType.equals(CONTENT_EMBEDDED_BASE64)) {
                // optimization for 64 char base64 lines
                // convert to base64 one line at a time to conserve memory
                // VS: DF temp file base64 decoding fix
                if (m_fDfCache == null) {
                    if (bUse64ByteLines)
                        b64leftover = new byte[65];
                    else
                        content = new ByteArrayOutputStream();
                }
            }
            while ((fRead = fis.read(buf)) > 0 || b64left > 0) { // read input file
                if (m_logger.isDebugEnabled())
                    m_logger.debug("read: " + fRead + " bytes of input data");
                if (m_contentType.equals(CONTENT_EMBEDDED_BASE64)) {
                    // VS: DF temp file base64 decoding fix
                    if (m_fDfCache != null) {
                        // Cache file already holds base64 content: pass through unchanged.
                        if (os != null)
                            os.write(buf, 0, fRead);
                        sha.update(buf, 0, fRead);
                    } else {
                        if (bUse64ByteLines) { // 1 line base64 optimization
                            b64left = calculateAndWriteBase64Block(os, sha, b64leftover, b64left, buf, fRead,
                                    fRead < block_size);
                        } else { // no optimization
                            content.write(buf, 0, fRead);
                        }
                    }
                } else {
                    // Non-base64 content: convert the chunk to UTF-8 and collect it.
                    if (fRead < buf.length) {
                        tmp2 = new byte[fRead];
                        System.arraycopy(buf, 0, tmp2, 0, fRead);
                        tmp1 = ConvertUtils.data2utf8(tmp2, m_codepage);
                    } else
                        tmp1 = ConvertUtils.data2utf8(buf, m_codepage);
                    sbDig.write(tmp1);
                }
                if (m_logger.isDebugEnabled())
                    m_logger.debug(
                            "End using block: " + fRead + " in: " + ((fis != null) ? fis.available() : 0));
            } // end reading input file
            if (m_contentType.equals(CONTENT_EMBEDDED_BASE64)) {
                // VS: DF temp file base64 decoding fix
                if (!bUse64ByteLines && m_fDfCache == null)
                    sbDig.write(Base64Util.encode(content.toByteArray(), 0).getBytes());
                content = null;
            }
            if (m_logger.isDebugEnabled())
                m_logger.debug("End reading content");
        } else { // content already in memory
            if (m_logger.isDebugEnabled())
                m_logger.debug("Using mem content, len: " + ((m_body != null) ? m_body.length : 0) + " b64: "
                        + m_bodyIsBase64);
            if (m_body != null) {
                if (bUse64ByteLines && m_contentType.equals(CONTENT_EMBEDDED_BASE64) && !m_bodyIsBase64) {
                    // Hash/write line-by-line, then keep the body base64-encoded in memory.
                    calculateAndWriteBase64Block(os, sha, null, 0, m_body, m_body.length, true);
                    m_body = Base64Util.encode(m_body).getBytes();
                    //sbDig.write(m_body); // this code block not used any more ?
                } else {
                    if (m_contentType.equals(CONTENT_EMBEDDED_BASE64) && !m_bodyIsBase64) {
                        tmp1 = Base64Util.encode(m_body).getBytes();
                    } else if (m_contentType.equals(CONTENT_EMBEDDED_BASE64) && m_bodyIsBase64) {
                        tmp1 = ConvertUtils.data2utf8(m_body, m_codepage);
                    } else
                        tmp1 = ConvertUtils.data2utf8(m_body, m_codepage);
                    sbDig.write(tmp1);
                }
            }
        }
        tmp1 = null;
        // don't need to canonicalize base64 content !
        if (m_contentType.equals(CONTENT_EMBEDDED_BASE64)) {
            // VS: DF temp file base64 decoding fix
            if (!bUse64ByteLines && m_fDfCache == null) {
                tmp2 = sbDig.toByteArray();
                if (tmp2 != null && tmp2.length > 0) {
                    sha.update(tmp2);
                    if (os != null)
                        os.write(tmp2);
                }
            } else if (m_body != null && sbDig.size() > 0) {
                tmp2 = sbDig.toByteArray();
                if (tmp2 != null && tmp2.length > 0) {
                    sha.update(tmp2);
                    if (os != null)
                        os.write(tmp2);
                }
            }
        } else {
            // canonicalize body (only when it looks like XML, i.e. starts with '<')
            tmp2 = sbDig.toByteArray();
            if (tmp2 != null && tmp2.length > 0) {
                if (tmp2[0] == '<')
                    tmp2 = canonicalizeXml(tmp2);
                if (tmp2 != null && tmp2.length > 0) {
                    sha.update(tmp2); // crash
                    if (os != null)
                        os.write(tmp2);
                }
            }
        }
        tmp2 = null;
        sbDig = null;
        // trailer
        tmp1 = xmlTrailer();
        sha.update(tmp1);
        if (os != null)
            os.write(tmp1);
        // now calculate the digest
        byte[] digest = sha.digest();
        setDigest(digest);
        if (m_logger.isDebugEnabled())
            m_logger.debug("DataFile: \'" + getId() + "\' length: " + getSize() + " digest: "
                    + Base64Util.encode(digest));
        m_fileName = longFileName;
    } catch (Exception ex) {
        DigiDocException.handleException(ex, DigiDocException.ERR_READ_FILE);
    } finally {
        try {
            if (fis != null)
                fis.close();
        } catch (Exception ex) {
            m_logger.error("Error closing file: " + ex);
        }
    }
}
From source file:davmail.imap.ImapConnection.java
/**
 * Handles a single message inside an IMAP FETCH command: builds the
 * "* n FETCH (UID ...)" untagged response for the requested data items
 * (FLAGS, RFC822.SIZE, ENVELOPE, BODYSTRUCTURE, INTERNALDATE, RFC822,
 * BODY[...], BODY.PEEK[...], RFC822.HEADER) and writes it to the client.
 *
 * @param message      the Exchange message being fetched
 * @param currentIndex 1-based message sequence number for the response
 * @param parameters   the raw FETCH data-item list from the client, may be null
 * @throws IOException        on socket or content retrieval errors
 * @throws MessagingException on MIME parsing errors
 */
private void handleFetch(ExchangeSession.Message message, int currentIndex, String parameters)
        throws IOException, MessagingException {
    StringBuilder buffer = new StringBuilder();
    buffer.append("* ").append(currentIndex).append(" FETCH (UID ").append(message.getImapUid());
    if (parameters != null) {
        // FETCH data items are whitespace-separated tokens
        StringTokenizer paramTokens = new StringTokenizer(parameters);
        while (paramTokens.hasMoreTokens()) {
            @SuppressWarnings({ "NonConstantStringShouldBeStringBuffer" })
            String param = paramTokens.nextToken().toUpperCase();
            if ("FLAGS".equals(param)) {
                buffer.append(" FLAGS (").append(message.getImapFlags()).append(')');
            } else if ("RFC822.SIZE".equals(param)) {
                int size;
                if (parameters.indexOf("BODY.PEEK[HEADER.FIELDS (") >= 0) {
                    // Header request, send approximate size
                    size = message.size;
                } else {
                    size = message.getMimeMessageSize();
                }
                buffer.append(" RFC822.SIZE ").append(size);
            } else if ("ENVELOPE".equals(param)) {
                appendEnvelope(buffer, message);
            } else if ("BODYSTRUCTURE".equals(param)) {
                appendBodyStructure(buffer, message);
            } else if ("INTERNALDATE".equals(param) && message.date != null && message.date.length() > 0) {
                // Reformat the Exchange Zulu timestamp into IMAP internal date syntax
                try {
                    SimpleDateFormat dateParser = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
                    dateParser.setTimeZone(ExchangeSession.GMT_TIMEZONE);
                    Date date = ExchangeSession.getZuluDateFormat().parse(message.date);
                    SimpleDateFormat dateFormatter = new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss Z", Locale.ENGLISH);
                    buffer.append(" INTERNALDATE \"").append(dateFormatter.format(date)).append('\"');
                } catch (ParseException e) {
                    throw new DavMailException("EXCEPTION_INVALID_DATE", message.date);
                }
            } else if (param.equals("RFC822") || param.startsWith("BODY[") || param.startsWith("BODY.PEEK[")
                    || "RFC822.HEADER".equals(param)) {
                // get full param: the section spec may contain spaces, so keep
                // consuming tokens until the closing bracket is reached
                if (param.indexOf('[') >= 0) {
                    StringBuilder paramBuffer = new StringBuilder(param);
                    while (paramTokens.hasMoreTokens() && paramBuffer.indexOf("]") < 0) {
                        paramBuffer.append(' ').append(paramTokens.nextToken());
                    }
                    param = paramBuffer.toString();
                }
                // parse buffer size, i.e. the optional <start.length> partial range
                int startIndex = 0;
                int maxSize = Integer.MAX_VALUE;
                int ltIndex = param.indexOf('<');
                if (ltIndex >= 0) {
                    int dotIndex = param.indexOf('.', ltIndex);
                    if (dotIndex >= 0) {
                        startIndex = Integer.parseInt(param.substring(ltIndex + 1, dotIndex));
                        maxSize = Integer.parseInt(param.substring(dotIndex + 1, param.indexOf('>')));
                    }
                }
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                InputStream partInputStream = null;
                OutputStream partOutputStream = null;
                // try to parse message part index (the text between [ and ])
                String partIndexString = StringUtil.getToken(param, "[", "]");
                if ("".equals(partIndexString) || partIndexString == null) {
                    // write message with headers
                    partOutputStream = new PartialOutputStream(baos, startIndex, maxSize);
                    partInputStream = message.getRawInputStream();
                } else if ("TEXT".equals(partIndexString)) {
                    // write message without headers
                    partOutputStream = new PartialOutputStream(baos, startIndex, maxSize);
                    partInputStream = message.getMimeMessage().getRawInputStream();
                } else if ("RFC822.HEADER".equals(param) || partIndexString.startsWith("HEADER")) {
                    // Header requested fetch headers
                    String[] requestedHeaders = getRequestedHeaders(partIndexString);
                    if (requestedHeaders != null) {
                        // OSX Lion special flags request
                        if (requestedHeaders.length == 1 && "content-class".equals(requestedHeaders[0])
                                && message.contentClass != null) {
                            baos.write("Content-class: ".getBytes("UTF-8"));
                            baos.write(message.contentClass.getBytes("UTF-8"));
                            // CRLF line terminator required by the IMAP protocol
                            baos.write(13);
                            baos.write(10);
                        } else {
                            Enumeration headerEnumeration = message.getMatchingHeaderLines(requestedHeaders);
                            while (headerEnumeration.hasMoreElements()) {
                                baos.write(((String) headerEnumeration.nextElement()).getBytes("UTF-8"));
                                baos.write(13);
                                baos.write(10);
                            }
                        }
                    } else {
                        // write headers only
                        partOutputStream = new PartOutputStream(baos, true, false, startIndex, maxSize);
                        partInputStream = message.getRawInputStream();
                    }
                } else {
                    // numeric section spec like 1.2: walk the MIME multipart tree
                    MimePart bodyPart = message.getMimeMessage();
                    String[] partIndexStrings = partIndexString.split("\\.");
                    for (String subPartIndexString : partIndexStrings) {
                        // ignore MIME subpart index, will return full part
                        if ("MIME".equals(subPartIndexString)) {
                            break;
                        }
                        int subPartIndex;
                        // try to parse part index
                        try {
                            subPartIndex = Integer.parseInt(subPartIndexString);
                        } catch (NumberFormatException e) {
                            throw new DavMailException("EXCEPTION_INVALID_PARAMETER", param);
                        }
                        Object mimeBody = bodyPart.getContent();
                        if (mimeBody instanceof MimeMultipart) {
                            MimeMultipart multiPart = (MimeMultipart) mimeBody;
                            if (subPartIndex - 1 < multiPart.getCount()) {
                                bodyPart = (MimePart) multiPart.getBodyPart(subPartIndex - 1);
                            } else {
                                throw new DavMailException("EXCEPTION_INVALID_PARAMETER", param);
                            }
                        } else if (subPartIndex != 1) {
                            // non-multipart message only has part 1
                            throw new DavMailException("EXCEPTION_INVALID_PARAMETER", param);
                        }
                    }
                    // write selected part, without headers
                    partOutputStream = new PartialOutputStream(baos, startIndex, maxSize);
                    if (bodyPart instanceof MimeMessage) {
                        partInputStream = ((MimeMessage) bodyPart).getRawInputStream();
                    } else {
                        partInputStream = ((MimeBodyPart) bodyPart).getRawInputStream();
                    }
                }
                // copy selected content to baos
                if (partInputStream != null && partOutputStream != null) {
                    IOUtil.write(partInputStream, partOutputStream);
                    partInputStream.close();
                    partOutputStream.close();
                }
                baos.close();
                if ("RFC822.HEADER".equals(param)) {
                    buffer.append(" RFC822.HEADER ");
                } else {
                    buffer.append(" BODY[").append(partIndexString).append(']');
                }
                // partial response marker <start> when a range was requested
                if (startIndex > 0 || maxSize != Integer.MAX_VALUE) {
                    buffer.append('<').append(startIndex).append('>');
                }
                // IMAP literal: announce byte count, then send the raw bytes
                buffer.append(" {").append(baos.size()).append('}');
                sendClient(buffer.toString());
                // log content if less than 2K
                if (LOGGER.isDebugEnabled() && baos.size() < 2048) {
                    LOGGER.debug(new String(baos.toByteArray(), "UTF-8"));
                }
                os.write(baos.toByteArray());
                os.flush();
                // literal already sent, start a fresh buffer for remaining items
                buffer.setLength(0);
            }
        }
    }
    buffer.append(')');
    sendClient(buffer.toString());
    // do not keep message content in memory
    message.dropMimeMessage();
}
From source file:davmail.exchange.dav.DavExchangeSession.java
/** * @inheritDoc/*from ww w . j a va 2s. c o m*/ */ @Override protected byte[] getContent(ExchangeSession.Message message) throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); InputStream contentInputStream; try { try { try { contentInputStream = getContentInputStream(message.messageUrl); } catch (UnknownHostException e) { // failover for misconfigured Exchange server, replace host name in url restoreHostName = true; contentInputStream = getContentInputStream(message.messageUrl); } } catch (HttpNotFoundException e) { LOGGER.debug("Message not found at: " + message.messageUrl + ", retrying with permanenturl"); contentInputStream = getContentInputStream(message.permanentUrl); } try { IOUtil.write(contentInputStream, baos); } finally { contentInputStream.close(); } } catch (LoginTimeoutException e) { // throw error on expired session LOGGER.warn(e.getMessage()); throw e; } catch (IOException e) { LOGGER.warn("Broken message at: " + message.messageUrl + " permanentUrl: " + message.permanentUrl + ", trying to rebuild from properties"); try { DavPropertyNameSet messageProperties = new DavPropertyNameSet(); messageProperties.add(Field.getPropertyName("contentclass")); messageProperties.add(Field.getPropertyName("message-id")); messageProperties.add(Field.getPropertyName("from")); messageProperties.add(Field.getPropertyName("to")); messageProperties.add(Field.getPropertyName("cc")); messageProperties.add(Field.getPropertyName("subject")); messageProperties.add(Field.getPropertyName("date")); messageProperties.add(Field.getPropertyName("htmldescription")); messageProperties.add(Field.getPropertyName("body")); PropFindMethod propFindMethod = new PropFindMethod(encodeAndFixUrl(message.permanentUrl), messageProperties, 0); DavGatewayHttpClientFacade.executeMethod(httpClient, propFindMethod); MultiStatus responses = propFindMethod.getResponseBodyAsMultiStatus(); if (responses.getResponses().length > 0) { MimeMessage mimeMessage = new 
MimeMessage((Session) null); DavPropertySet properties = responses.getResponses()[0].getProperties(HttpStatus.SC_OK); String propertyValue = getPropertyIfExists(properties, "contentclass"); if (propertyValue != null) { mimeMessage.addHeader("Content-class", propertyValue); } propertyValue = getPropertyIfExists(properties, "date"); if (propertyValue != null) { mimeMessage.setSentDate(parseDateFromExchange(propertyValue)); } propertyValue = getPropertyIfExists(properties, "from"); if (propertyValue != null) { mimeMessage.addHeader("From", propertyValue); } propertyValue = getPropertyIfExists(properties, "to"); if (propertyValue != null) { mimeMessage.addHeader("To", propertyValue); } propertyValue = getPropertyIfExists(properties, "cc"); if (propertyValue != null) { mimeMessage.addHeader("Cc", propertyValue); } propertyValue = getPropertyIfExists(properties, "subject"); if (propertyValue != null) { mimeMessage.setSubject(propertyValue); } propertyValue = getPropertyIfExists(properties, "htmldescription"); if (propertyValue != null) { mimeMessage.setContent(propertyValue, "text/html; charset=UTF-8"); } else { propertyValue = getPropertyIfExists(properties, "body"); if (propertyValue != null) { mimeMessage.setText(propertyValue); } } mimeMessage.writeTo(baos); } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Rebuilt message content: " + new String(baos.toByteArray())); } } catch (IOException e2) { LOGGER.warn(e2); } catch (DavException e2) { LOGGER.warn(e2); } catch (MessagingException e2) { LOGGER.warn(e2); } // other exception if (baos.size() == 0 && Settings.getBooleanProperty("davmail.deleteBroken")) { LOGGER.warn("Deleting broken message at: " + message.messageUrl + " permanentUrl: " + message.permanentUrl); try { message.delete(); } catch (IOException ioe) { LOGGER.warn("Unable to delete broken message at: " + message.permanentUrl); } throw e; } } return baos.toByteArray(); }