List of usage examples for java.io StringWriter close
public void close() throws IOException
From source file:com.ibm.sbt.test.lib.MockSerializer.java
public synchronized HttpResponse recordResponse(HttpResponse response) { try {// w w w . j ava2 s .co m StringWriter out = new StringWriter(); out.write("\n<response "); out.write("statusCode=\""); int statusCode = response.getStatusLine().getStatusCode(); out.write(String.valueOf(statusCode).trim()); out.write("\" "); out.write("statusReason=\""); String reasonPhrase = response.getStatusLine().getReasonPhrase(); out.write(String.valueOf(reasonPhrase).trim()); out.write("\">\n"); out.write("<headers>"); Header[] allHeaders = response.getAllHeaders(); out.write(serialize(allHeaders)); String serializedEntity = null; if (response.getEntity() != null) { out.write("</headers>\n<data><![CDATA["); serializedEntity = serialize(response.getEntity().getContent()); serializedEntity = serializedEntity.replaceAll("<!\\[CDATA\\[", "\\!\\[CDATA\\[") .replaceAll("\\]\\]>", "\\]\\]"); out.write(serializedEntity); out.write("]]></data>\n</response>"); } else { out.write("</headers>\n</response>"); } out.flush(); out.close(); writeData(out.toString()); return buildResponse(allHeaders, statusCode, reasonPhrase, serializedEntity); } catch (IOException e) { throw new UnsupportedOperationException(e); } }
From source file:org.eclipse.virgo.ide.bundlor.internal.core.BundlorProjectBuilder.java
/**
 * Re-formats a bundle manifest: parses the raw headers from {@code manifestInput}
 * and writes each one back through the Spring bundle model's canonical header
 * formatter, returning the result as a fresh stream.
 *
 * @param file          the manifest file (kept for signature compatibility; not read here)
 * @param manifestInput raw manifest content; always closed before this method returns
 * @return a stream over the re-formatted manifest (empty if the manifest could not be parsed)
 * @throws IOException if reading the manifest fails
 */
private InputStream formatManifest(IFile file, InputStream manifestInput) throws IOException {
    StringWriter writer = new StringWriter();
    SpringBundleModel model = new SpringBundleModel("", true);
    SpringBundleModelFactory factory = new SpringBundleModelFactory(model);
    try {
        try {
            Map headers = ManifestElement.parseBundleManifest(manifestInput, null);
            for (Object obj : headers.keySet()) {
                String key = (String) obj;
                String value = (String) headers.get(key);
                ManifestHeader header = (ManifestHeader) factory.createHeader(key, value);
                header.update(false);
                writer.write(header.write());
            }
        } catch (BundleException e) {
            // Best-effort: an unparseable manifest yields an empty result instead of
            // failing the build. NOTE(review): consider at least logging this.
        }
        String manifestOutput = writer.toString();
        writer.close();
        // Manifests are UTF-8 by spec; don't rely on the platform default charset.
        return new ByteArrayInputStream(manifestOutput.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    } finally {
        // Close the input and dispose the model even when parsing throws an
        // IOException (the original leaked both on that path).
        manifestInput.close();
        model.dispose();
    }
}
From source file:de.tu_dortmund.ub.data.dswarm.Init.java
/** * creates a data model from given resource + configuration JSON (+ optional input schema) * * @param resourceJSON/*w w w . j av a2 s. c om*/ * @param configurationJSON * @param optionalInputSchema * @param name * @param description * @return responseJson * @throws Exception */ private String createDataModel(final JsonObject resourceJSON, final JsonObject configurationJSON, final Optional<JsonObject> optionalInputSchema, final String name, final String description, final String serviceName, final String engineDswarmAPI, final boolean doIngest) throws Exception { try (final CloseableHttpClient httpclient = HttpClients.createDefault()) { final String uri = engineDswarmAPI + DswarmBackendStatics.DATAMODELS_ENDPOINT + APIStatics.QUESTION_MARK + DswarmBackendStatics.DO_DATA_MODEL_INGEST_IDENTIFIER + APIStatics.EQUALS + doIngest; final HttpPost httpPost = new HttpPost(uri); final StringWriter stringWriter = new StringWriter(); final JsonGenerator jp = Json.createGenerator(stringWriter); jp.writeStartObject(); jp.write(DswarmBackendStatics.NAME_IDENTIFIER, name); jp.write(DswarmBackendStatics.DESCRIPTION_IDENTIFIER, description); jp.write(CONFIGURATION_IDENTIFIER, configurationJSON); jp.write(DswarmBackendStatics.DATA_RESOURCE_IDENTIFIER, resourceJSON); if (optionalInputSchema.isPresent()) { LOG.info("[{}][{}] add existing input schema to input data model", serviceName, cnt); jp.write(DswarmBackendStatics.SCHEMA_IDENTIFIER, optionalInputSchema.get()); } jp.writeEnd(); jp.flush(); jp.close(); final StringEntity reqEntity = new StringEntity(stringWriter.toString(), ContentType.create(APIStatics.APPLICATION_JSON_MIMETYPE, Consts.UTF_8)); stringWriter.flush(); stringWriter.close(); httpPost.setEntity(reqEntity); LOG.info(String.format("[%s][%d] request : %s", serviceName, cnt, httpPost.getRequestLine())); try (final CloseableHttpResponse httpResponse = httpclient.execute(httpPost)) { final int statusCode = httpResponse.getStatusLine().getStatusCode(); final String message 
= String.format("[%s][%d] %d : %s", serviceName, cnt, statusCode, httpResponse.getStatusLine().getReasonPhrase()); final String response = TPUUtil.getResponseMessage(httpResponse); switch (statusCode) { case 201: { LOG.info(message); LOG.debug(String.format("[%s][%d] responseJson : %s", serviceName, cnt, response)); return response; } default: { LOG.error(message); throw new Exception("something went wrong at data model creation: " + message + " " + response); } } } } }
From source file:br.org.indt.ndg.server.client.TemporaryOpenRosaBussinessDelegate.java
/********** Uploading OpenRosa Surveys and Results **********/ public boolean parseAndPersistSurvey(InputStreamReader inputStreamReader, String contentType) throws IOException { String surveyString = parseMultipartEncodedFile(inputStreamReader, contentType, "filename"); String surveyId = null;//from ww w .ja v a2s .c o m String surveyIdOriginal = null; Document surveyDomDocument = null; ByteArrayInputStream streamToParse = new ByteArrayInputStream(surveyString.getBytes("UTF-8")); try { surveyDomDocument = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(streamToParse); } catch (SAXException e) { e.printStackTrace(); return false; } catch (ParserConfigurationException e) { e.printStackTrace(); return false; } finally { streamToParse.close(); } NodeList dataNodeList = surveyDomDocument.getElementsByTagName("data"); if (dataNodeList.getLength() != 1) { return false; // there MUST be exactly 1 <data> node } else { Element dataElement = (Element) dataNodeList.item(0); Random rand = new Random(System.currentTimeMillis()); int newId = rand.nextInt(Integer.MAX_VALUE); surveyId = String.valueOf(newId); surveyIdOriginal = dataElement.getAttribute("id"); dataElement.setAttribute("id", String.valueOf(newId)); StringWriter stringWriter = null; try { Source source = new DOMSource(surveyDomDocument); stringWriter = new StringWriter(); Result result = new StreamResult(stringWriter); TransformerFactory factory = TransformerFactory.newInstance(); Transformer transformer = factory.newTransformer(); transformer.setOutputProperty(OutputKeys.INDENT, "no"); transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8"); transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes"); transformer.setOutputProperty(OutputKeys.METHOD, "xml"); transformer.transform(source, result); surveyString = stringWriter.getBuffer().toString(); } catch (Exception e) { e.printStackTrace(); return false; } finally { stringWriter.close(); } log.info("========================"); 
log.info("Original Survey Id: " + surveyIdOriginal); log.info("New Survey Id: " + surveyId); log.info("========================"); } return persistSurvey(surveyString, surveyId, surveyIdOriginal); }
From source file:com.bluexml.side.integration.buildHudson.utils.Utils.java
/**
 * Reads the whole content of a file into a String, mapping each byte to one
 * char (effectively ISO-8859-1 decoding, as the original byte-wise loop did).
 *
 * Package-private (was private) so it can be exercised by tests; the fix also
 * closes the input stream on every path (the original leaked it on error) and
 * no longer NPEs when the file cannot be opened (the original returned
 * {@code out.toString()} while {@code out} was still null in that case).
 *
 * @param f the file to read
 * @return the file content; on I/O error, whatever could be read so far
 *         (possibly the empty string) after logging the exception
 */
static String loadFile(File f) {
    StringWriter out = new StringWriter();
    try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(f))) {
        int b;
        while ((b = in.read()) != -1) {
            out.write(b);
        }
    } catch (IOException ie) {
        // Best-effort: log and fall through to return the partial content.
        ie.printStackTrace();
    }
    return out.toString();
}
From source file:com.hichinaschool.flashcards.libanki.sync.BasicHttpSyncer.java
/**
 * Builds a multipart/form-data POST for the Anki sync server and executes it.
 *
 * The multipart body is assembled in a temp file: first the form variables
 * ("c" compression flag, plus "k"/"s" session keys when {@code hkey} is set),
 * then — when {@code fobj} is non-null — a "data" file part, gzip-compressed
 * when {@code comp != 0}.
 *
 * @param method         sync command; "register" and "upgrade*" select special URLs,
 *                       everything else is appended to "sync/"
 * @param fobj           optional payload stream; null means form variables only
 * @param comp           non-zero to gzip the payload
 * @param hkey           whether to include the stored session keys as form vars
 * @param registerData   JSON with "u"/"p" credentials (read only for "register")
 * @param cancelCallback optional; receives the connection manager so the caller can abort
 * @return the HTTP response, or null on SSL/IO failure (errors are logged)
 */
public HttpResponse req(String method, InputStream fobj, int comp, boolean hkey, JSONObject registerData,
        Connection.CancelCallback cancelCallback) {
    File tmpFileBuffer = null;
    try {
        String bdry = "--" + BOUNDARY;
        StringWriter buf = new StringWriter();
        HashMap<String, Object> vars = new HashMap<String, Object>();
        // compression flag and session key as post vars
        vars.put("c", comp != 0 ? 1 : 0);
        if (hkey) {
            vars.put("k", mHKey);
            vars.put("s", mSKey);
        }
        for (String key : vars.keySet()) {
            buf.write(bdry + "\r\n");
            buf.write(String.format(Locale.US,
                    "Content-Disposition: form-data; name=\"%s\"\r\n\r\n%s\r\n", key, vars.get(key)));
        }
        tmpFileBuffer = File.createTempFile("syncer", ".tmp",
                new File(AnkiDroidApp.getCacheStorageDirectory()));
        FileOutputStream fos = new FileOutputStream(tmpFileBuffer);
        BufferedOutputStream bos = new BufferedOutputStream(fos);
        GZIPOutputStream tgt;
        // payload as raw data or json
        if (fobj != null) {
            // header for the file part
            buf.write(bdry + "\r\n");
            buf.write(
                    "Content-Disposition: form-data; name=\"data\"; filename=\"data\"\r\nContent-Type: application/octet-stream\r\n\r\n");
            buf.close();
            bos.write(buf.toString().getBytes("UTF-8"));
            // write file into buffer, optionally compressing
            int len;
            BufferedInputStream bfobj = new BufferedInputStream(fobj);
            byte[] chunk = new byte[65536];
            if (comp != 0) {
                tgt = new GZIPOutputStream(bos);
                while ((len = bfobj.read(chunk)) >= 0) {
                    tgt.write(chunk, 0, len);
                }
                // closing the gzip stream also closes bos, so reopen the temp file
                // in append mode to write the closing boundary
                tgt.close();
                bos = new BufferedOutputStream(new FileOutputStream(tmpFileBuffer, true));
            } else {
                while ((len = bfobj.read(chunk)) >= 0) {
                    bos.write(chunk, 0, len);
                }
            }
            bos.write(("\r\n" + bdry + "--\r\n").getBytes("UTF-8"));
        } else {
            buf.close();
            bos.write(buf.toString().getBytes("UTF-8"));
        }
        bos.flush();
        bos.close();
        // choose target URL from the sync command
        String url = Collection.SYNC_URL;
        if (method.equals("register")) {
            url = url + "account/signup" + "?username=" + registerData.getString("u") + "&password="
                    + registerData.getString("p");
        } else if (method.startsWith("upgrade")) {
            url = url + method;
        } else {
            url = url + "sync/" + method;
        }
        HttpPost httpPost = new HttpPost(url);
        HttpEntity entity = new ProgressByteEntity(tmpFileBuffer);
        // body
        httpPost.setEntity(entity);
        httpPost.setHeader("Content-type", "multipart/form-data; boundary=" + BOUNDARY);
        // HttpParams
        HttpParams params = new BasicHttpParams();
        params.setParameter(ConnManagerPNames.MAX_TOTAL_CONNECTIONS, 30);
        params.setParameter(ConnManagerPNames.MAX_CONNECTIONS_PER_ROUTE, new ConnPerRouteBean(30));
        params.setParameter(CoreProtocolPNames.USE_EXPECT_CONTINUE, false);
        params.setParameter(CoreProtocolPNames.USER_AGENT, "AnkiDroid-" + AnkiDroidApp.getPkgVersionName());
        HttpProtocolParams.setVersion(params, HttpVersion.HTTP_1_1);
        HttpConnectionParams.setSoTimeout(params, Connection.CONN_TIMEOUT);
        // Registry: plain HTTP on 80, permissive SSL on 443
        SchemeRegistry registry = new SchemeRegistry();
        registry.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80));
        registry.register(new Scheme("https", new EasySSLSocketFactory(), 443));
        ThreadSafeClientConnManager cm = new ThreadSafeClientConnManager(params, registry);
        if (cancelCallback != null) {
            cancelCallback.setConnectionManager(cm);
        }
        try {
            HttpClient httpClient = new DefaultHttpClient(cm, params);
            return httpClient.execute(httpPost);
        } catch (SSLException e) {
            Log.e(AnkiDroidApp.TAG, "SSLException while building HttpClient", e);
            return null;
        }
    } catch (UnsupportedEncodingException e) {
        throw new RuntimeException(e);
    } catch (IOException e) {
        Log.e(AnkiDroidApp.TAG, "BasicHttpSyncer.sync: IOException", e);
        return null;
    } catch (JSONException e) {
        throw new RuntimeException(e);
    } finally {
        // the multipart body has been handed to ProgressByteEntity by now;
        // NOTE(review): deleting here assumes execute() fully consumed the file —
        // confirm ProgressByteEntity does not read lazily after this point
        if (tmpFileBuffer != null && tmpFileBuffer.exists()) {
            tmpFileBuffer.delete();
        }
    }
}
From source file:org.yamj.api.common.http.AbstractPoolingHttpClient.java
/**
 * Drains the response entity into a DigestedResponse carrying the status code
 * and the body text.
 *
 * Note: the body is read with readLine, which strips line terminators, so the
 * digested content is the concatenation of all lines without separators —
 * identical to the original behavior.
 *
 * @param response the HTTP response whose entity is consumed
 * @param charset  charset for decoding; null falls back to the platform default
 * @return status code plus digested body text
 * @throws IOException on read failure (close failures are only traced)
 */
protected DigestedResponse readContent(final HttpResponse response, final Charset charset) throws IOException {
    final StringWriter writer = new StringWriter(SW_BUFFER_10K);
    final InputStream stream = response.getEntity().getContent();
    InputStreamReader reader = null;
    BufferedReader buffered = null;

    final DigestedResponse digestedResponse = new DigestedResponse();
    digestedResponse.setStatusCode(response.getStatusLine().getStatusCode());

    try {
        reader = new InputStreamReader(stream, charset == null ? Charset.defaultCharset() : charset);
        buffered = new BufferedReader(reader);
        for (String line = buffered.readLine(); line != null; line = buffered.readLine()) {
            writer.write(line);
        }
        writer.flush();
        digestedResponse.setContent(writer.toString());
        return digestedResponse;
    } finally {
        // close everything best-effort, tracing (not propagating) failures
        if (buffered != null) {
            try {
                buffered.close();
            } catch (IOException ex) {
                LOG.trace("Failed to close BufferedReader", ex);
            }
        }
        if (reader != null) {
            try {
                reader.close();
            } catch (IOException ex) {
                LOG.trace("Failed to close InputStreamReader", ex);
            }
        }
        try {
            writer.close();
        } catch (IOException ex) {
            LOG.trace("Failed to close StringWriter", ex);
        }
        try {
            stream.close();
        } catch (IOException ex) {
            LOG.trace("Failed to close InputStream", ex);
        }
    }
}
From source file:edu.isi.karma.controller.command.reconciliation.InvokeRubenReconciliationService.java
@Override public UpdateContainer doIt(VWorkspace vWorkspace) throws CommandException { RepFactory f = vWorkspace.getRepFactory(); Worksheet worksheet = vWorkspace.getViewFactory().getVWorksheet(vWorksheetId).getWorksheet(); Alignment alignment = AlignmentManager.Instance().getAlignment( AlignmentManager.Instance().constructAlignmentId(vWorkspace.getWorkspace().getId(), vWorksheetId)); // Set the prefix and namespace to be used while generating RDF fetchRdfPrefixAndNamespaceFromPreferences(vWorkspace); // Generate the KR2RML data structures for the RDF generation final ErrorReport errorReport = new ErrorReport(); KR2RMLMappingGenerator mappingGen = new KR2RMLMappingGenerator( vWorkspace.getWorkspace().getOntologyManager(), alignment, worksheet.getSemanticTypes(), rdfPrefix, rdfNamespace, true, errorReport); TriplesMap trMap = mappingGen.getTriplesMapForNodeId(alignmentNodeId); // Remove the triple maps and info that we don't need // filterTripleMapsAndAuxillaryInformation(); // Get the column that contains the key for the internal node String keyColumnHNodeId = getKeyColumnHNodeIdForAlignmentNode(alignment); if (keyColumnHNodeId == null) { return new UpdateContainer(new ErrorUpdate("Please assign a column as a key for the class")); }/* ww w.jav a 2s. 
c om*/ // Loop through each row that contains the column containing key HNode hNode = f.getHNode(keyColumnHNodeId); HNodePath path = hNode.getHNodePath(f); Collection<Node> nodes = new ArrayList<Node>(); worksheet.getDataTable().collectNodes(path, nodes); Map<Row, String> rowToUriMap = new HashMap<Row, String>(); // For through each row, generate the RDF, and invoke the service try { int count = 1; for (Node node : nodes) { if (count % 5 == 0) { System.out.println("Done invoking linking service for " + count + " rows"); } Row row = node.getBelongsToRow(); // Generate the RDF StringWriter outRdf = new StringWriter(); PrintWriter pw = new PrintWriter(outRdf); KR2RMLWorksheetRDFGenerator rdfGen = new KR2RMLWorksheetRDFGenerator(worksheet, vWorkspace.getRepFactory(), vWorkspace.getWorkspace().getOntologyManager(), pw, mappingGen.getMappingAuxillaryInformation(), errorReport, false); rdfGen.generateTriplesForRow(row, new HashSet<String>(), new HashSet<String>(), new HashMap<String, ReportMessage>(), new HashSet<String>()); pw.flush(); String rdf = outRdf.toString(); // Sanity check if (rdf == null || rdf.trim().isEmpty()) continue; String keyUri = rdfGen.normalizeUri(rdfGen.getTemplateTermSetPopulatedWithValues( node.getColumnValues(), trMap.getSubject().getTemplate())); rowToUriMap.put(row, keyUri); // Check if the macthes already exist in the triple store if (checkTripleStoreIfMatchAlreadyExists(keyUri)) { System.out.println("Match already exists!"); outRdf.close(); pw.close(); count++; continue; } // Invoke the linking service if no match exists in the triple store String serviceInput = rdf.replaceAll('<' + keyUri + '>', "?x"); String res = invokeReconcilitaionService(serviceInput); if (res == null || res.isEmpty()) { System.out.println("No linking output for " + serviceInput); continue; } // Insert the subject uri inside the service output int triplesStartIndex = res.indexOf("["); if (triplesStartIndex != -1) { String finalRdfOutput = res.substring(0, 
triplesStartIndex) + "<" + keyUri + "> <" + Uris.KM_LINKING_MATCHES_URI + "> " + res.substring(triplesStartIndex); HTTPUtil.executeHTTPPostRequest(TripleStoreUtil.defaultDataRepoUrl + "/statements", "text/n3", "", finalRdfOutput); } outRdf.close(); pw.close(); count++; } // Add a column at the same level as key column HNode linkingHNode = hNode.getHTable(f).addNewHNodeAfter(hNode.getId(), f, "LinkingMatches", worksheet, true); // Add a nested table inside the linkingHNode HTable linkingNestedTable = linkingHNode.addNestedTable("Matches", worksheet, f); HNode entityColHNode = linkingNestedTable.addHNode("Entity", worksheet, f); HNode scoreColHNode = linkingNestedTable.addNewHNodeAfter(entityColHNode.getId(), f, "Score", worksheet, true); // For each row, query the triple store to get the possible matches for (Row row : rowToUriMap.keySet()) { String subjUri = rowToUriMap.get(row); // Query the triple store to get a list of matches String query = "PREFIX d:<http://entities.restdesc.org/terms#> " + "SELECT ?entity ?score WHERE " + "{ <" + subjUri + "> <" + Uris.KM_LINKING_MATCHES_URI + "> ?x ." + " ?x d:possibleMatch ?match . " + " ?match d:entity ?entity . " + " ?match d:similarity ?score . 
" + "} ORDER BY DESC(?score)"; String sData = TripleStoreUtil.invokeSparqlQuery(query, TripleStoreUtil.defaultDataRepoUrl, "application/sparql-results+json", null); if (sData == null | sData.isEmpty()) { System.out.println("Empty response object from query : " + query); } JSONObject queryRes = new JSONObject(sData); if (queryRes != null) { Table linkingDataTable = row.getNode(linkingHNode.getId()).getNestedTable(); JSONArray bindings = queryRes.getJSONObject("results").getJSONArray("bindings"); if (bindings == null || bindings.length() == 0) continue; for (int i = 0; i < bindings.length(); i++) { JSONObject binding = bindings.getJSONObject(i); Row r1 = linkingDataTable.addRow(f); String score = binding.getJSONObject("score").getString("value"); if (score.length() > 5) { score = score.substring(0, 4); } r1.setValue(entityColHNode.getId(), binding.getJSONObject("entity").getString("value"), f); r1.setValue(scoreColHNode.getId(), score, f); } } } } catch (Exception e) { e.printStackTrace(); } // Prepare the output container UpdateContainer c = new UpdateContainer(); vWorkspace.getViewFactory().updateWorksheet(vWorksheetId, worksheet, worksheet.getHeaders().getAllPaths(), vWorkspace); vWorkspace.getViewFactory().getVWorksheet(this.vWorksheetId).update(c); /** Add the alignment update **/ addAlignmentUpdate(c, vWorkspace, worksheet); c.add(new InfoUpdate("Linking complete")); return c; }
From source file:hudson.model.View.java
/**
 * Updates the View with the new XML definition.
 *
 * The submitted XML is first round-tripped through a safe transform (which both
 * normalizes it to UTF-8 and rejects malformed input), then unmarshalled onto
 * this instance via XStream; the view name and owner are preserved across the
 * unmarshal, and the type of the unmarshalled object must match this view's type.
 *
 * @param source source of the Item's new definition.
 * The source should be either a <code>StreamSource</code> or <code>SAXSource</code>, other sources
 * may not be handled.
 */
public void updateByXml(Source source) throws IOException {
    checkPermission(CONFIGURE);
    StringWriter out = new StringWriter();
    try {
        // this allows us to use UTF-8 for storing data,
        // plus it checks any well-formedness issue in the submitted
        // data
        XMLUtils.safeTransform(source, new StreamResult(out));
        out.close();
    } catch (TransformerException | SAXException e) {
        throw new IOException("Failed to persist configuration.xml", e);
    }

    // try to reflect the changes by reloading
    try (InputStream in = new BufferedInputStream(new ByteArrayInputStream(out.toString().getBytes("UTF-8")))) {
        // Do not allow overwriting view name as it might collide with another
        // view in same ViewGroup and might not satisfy Jenkins.checkGoodName.
        String oldname = name;
        ViewGroup oldOwner = owner; // oddly, this field is not transient
        Object o = Jenkins.XSTREAM2.unmarshal(XStream2.getDefaultDriver().createReader(in), this, null, true);
        if (!o.getClass().equals(getClass())) {
            // ensure that we've got the same view type. extending this code to support updating
            // to different view type requires destroying & creating a new view type
            throw new IOException("Expecting view type: " + this.getClass() + " but got: " + o.getClass()
                    + " instead."
                    + "\nShould you needed to change to a new view type, you must first delete and then re-create "
                    + "the view with the new view type.");
        }
        // restore the fields that must survive the unmarshal
        name = oldname;
        owner = oldOwner;
    } catch (StreamException | ConversionException | Error e) { // mostly reflection errors
        throw new IOException("Unable to read", e);
    }
    save();
}