List of usage examples for java.io.PrintWriter.write
public void write(String s)
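Before the project-specific examples below, here is a minimal, self-contained sketch of write(String). The file name demo.txt and the message text are illustrative only. Note that write(String) appends no line separator (unlike println) and never throws IOException; checkError() is how write failures are detected.

    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.OutputStreamWriter;
    import java.io.PrintWriter;
    import java.nio.charset.StandardCharsets;

    public class PrintWriterWriteDemo {
        public static void main(String[] args) throws IOException {
            // demo.txt is a hypothetical file name; UTF-8 is chosen explicitly
            try (PrintWriter writer = new PrintWriter(
                    new OutputStreamWriter(new FileOutputStream("demo.txt"), StandardCharsets.UTF_8))) {
                writer.write("Hello, PrintWriter");   // write(String) adds no line separator
                writer.write(System.lineSeparator()); // add one explicitly if needed
                writer.flush();
                if (writer.checkError()) {            // PrintWriter swallows IOException on write
                    System.err.println("write failed");
                }
            }
        }
    }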
From source file:com.mahisoft.elasticsearchprediction.engine.ElasticsearchGenericIndexEngine.java
private void createIndex(Client client, String indexName, PrintWriter mappingFileWriter, List<String> headers,
        CSVRecord csvRecord) throws IOException {
    XContentBuilder jsonBuilder = jsonBuilder().startObject().startObject("properties");
    int i = 0;
    for (String value : csvRecord) {
        if (i == headers.size() - 1) {
            continue;
        }
        DataType dataType = typeGuesser.guess(value);
        mappingFileWriter.write(headers.get(i) + ":" + dataType + (i == headers.size() - 2 ? "" : ","));
        jsonBuilder.startObject(headers.get(i)).field("type", dataType).field("index", "not_analyzed")
                .endObject();
        i++;
    }
    jsonBuilder.endObject().endObject();

    CreateIndexResponse createIndexResponse = client.admin().indices().prepareCreate(indexName)
            .addMapping(TYPE, jsonBuilder).execute().actionGet();

    if (!createIndexResponse.isAcknowledged()) {
        String message = "Problem creating index";
        LOGGER.info(message);
        throw new ElasticsearchGenericIndexException(message);
    }
    LOGGER.info("Index created");
}
From source file:de.akra.idocit.wsdl.services.WSDLParserTest.java
/**
 * Tests {@link WSDLParser#parse(org.eclipse.core.resources.IFile)}
 *
 * @throws Exception
 */
@Test
public void testParseWSDL() throws Exception {
    /*
     * Positive tests
     * ******************************************************************************
     * Test case #1: Parse the PortType of the WSDL file
     * {@link Constants#FOLDER_SOURCE}/CustomerService.xml". One PortType is expected.
     * ******************************************************************************
     */
    {
        InterfaceArtifact iStruct = null;
        WSDLParserMock parser = new WSDLParserMock();

        iStruct = parser.parse(new File(Constants.FOLDER_SOURCE + "CustomerService.wsdl"));
        // iStruct = parser.parse(new File(Constants.FOLDER_SOURCE + "wsdl_46001"));
        assertEquals(1, iStruct.getInterfaces().size());

        StringBuffer parseResult = new StringBuffer();
        TestUtils.buildHierarchy(parseResult, iStruct, 0);
        logger.log(Level.INFO, parseResult.toString());

        /*
         * write the result to a file
         */
        PrintWriter writer = new PrintWriter(new BufferedWriter(new OutputStreamWriter(
                new FileOutputStream(Constants.FOLDER_OUT + "testParseWSDL_with_CustomerService.wsdl.out"),
                Charset.forName(Misc.DEFAULT_CHARSET))));
        writer.write(parseResult.toString());
        writer.close();

        /*
         * ***************************************************************************
         * Test case #1.1: The generated structure of the prior parsed PortType of the
         * WSDL file {@link Constants#FOLDER_SOURCE}/CustomerService.xml" is compared
         * against an expected structure.
         * ***************************************************************************
         */
        // check result against an expectation
        String expectedFileContent = TestUtils
                .readFile(Constants.FOLDER_EXPECTED + "testParseWSDL_with_CustomerService.xml");
        assertEquals(expectedFileContent.toString(), parseResult.toString());
    }

    /*
     * ***************************************************************************
     * Test case #1.2: The two operations in file wsdl_46001.wsdl are expected to
     * have thematic grids. The names of the grids are ascending integer numbers
     * starting with 1.
     * ***************************************************************************
     */
    {
        WSDLParserMock parser = new WSDLParserMock();
        InterfaceArtifact iStruct = parser.parse(new File(Constants.FOLDER_SOURCE + "wsdl_46001.wsdl"));
        int opNumber = 0;
        assertEquals(1, iStruct.getInterfaces().size());
        for (Interface interfac : iStruct.getInterfaces()) {
            assertEquals(2, interfac.getOperations().size());
            for (Operation operation : interfac.getOperations()) {
                assertEquals(String.valueOf(opNumber), operation.getThematicGridName());
                opNumber++;
            }
        }
    }

    /*
     * Negative tests
     * ***************************************************************************
     * Test case #1: three WSDL-operations should be classified correctly
     * ****************************************************************************
     * None
     */
}
From source file:com.vcredit.lrh.microservice.gateway.api.redis.SecurityHandlerRedis.java
private void unauthorizedRequest(HttpServletResponse httpServletResponse) throws IOException {
    JSONObject jSONObject = new JSONObject();
    PrintWriter pw = httpServletResponse.getWriter();
    // httpServletResponse.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
    jSONObject.put("type", LrhConstants.ErrorCodeTypeEnum.RELOGIN.getCode());
    jSONObject.put("success", false);
    jSONObject.put("code", HttpServletResponse.SC_UNAUTHORIZED);
    jSONObject.put("message", "???");
    pw.write(jSONObject.toJSONString());
    pw.flush();
}
From source file:com.safi.workshop.sqlexplorer.sqlpanel.AbstractSQLExecution.java
/**
 * Logs the query to the debug log file, but only if the preferences require it. If the
 * query failed, the exception should be included too.
 *
 * @param query
 * @param sqlException
 */
protected void debugLogQuery(Query query, SQLException sqlException) {
    // Get the logging level
    String level = SQLExplorerPlugin.getDefault().getPreferenceStore()
            .getString(IConstants.QUERY_DEBUG_LOG_LEVEL);
    if (level == null || level.equals(IConstants.QUERY_DEBUG_OFF))
        return;
    if (sqlException == null && level.equals(IConstants.QUERY_DEBUG_FAILED))
        return;

    // Get the log files; if the current log is too big, retire it
    File dir = SQLExplorerPlugin.getDefault().getStateLocation().toFile();
    File log = new File(dir.getAbsolutePath() + '/' + "query-debug.log");
    File oldLog = new File(dir.getAbsolutePath() + '/' + "query-debug.old.log");

    // Too big? Then delete the old and archive the current
    if (log.exists() && log.length() > MAX_DEBUG_LOG_SIZE) {
        oldLog.delete();
        log.renameTo(oldLog);
    }

    // Copy it to the output
    PrintWriter writer = null;
    try {
        FileWriter fw = new FileWriter(log, true);
        writer = new PrintWriter(fw);
        try {
            writer.write("==============================================\r\n");
            StringBuffer sb = new StringBuffer(query.toString());
            for (int i = 0; i < sb.length(); i++)
                if (sb.charAt(i) == '\n')
                    sb.insert(i++, '\r');
            sb.append("\r\n");
            writer.write(sb.toString());
            if (sqlException != null)
                writer.write("FAILED: " + sqlException.getMessage() + "\r\n");
        } finally {
            writer.flush();
            writer.close();
        }
    } catch (IOException e) {
        SQLExplorerPlugin.error("Failed to log query", e);
    }
}
From source file:jp.xet.uncommons.web.HtmlCompressionFilter.java
@Override
protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response,
        FilterChain filterChain) throws ServletException, IOException {
    OutputStreamResponseWrapper wrappedResponse = new OutputStreamResponseWrapper(response);
    filterChain.doFilter(request, wrappedResponse);
    ByteArrayOutputStream baos = wrappedResponse.getRealOutputStream();

    if (enabled && Strings.nullToEmpty(response.getContentType()).startsWith("text/html")) {
        HtmlCompressor compressor = new HtmlCompressor();
        compressor.setEnabled(enabled);
        compressor.setRemoveComments(removeComments);
        compressor.setRemoveMultiSpaces(removeMultiSpaces);
        compressor.setRemoveIntertagSpaces(removeIntertagSpaces);
        compressor.setRemoveQuotes(removeQuotes);
        compressor.setCompressJavaScript(compressJavaScript);
        compressor.setCompressCss(compressCss);
        compressor.setYuiJsNoMunge(yuiJsNoMunge);
        compressor.setYuiJsPreserveAllSemiColons(yuiJsPreserveAllSemiColons);
        compressor.setYuiJsDisableOptimizations(yuiJsDisableOptimizations);
        compressor.setYuiJsLineBreak(yuiJsLineBreak);
        compressor.setYuiCssLineBreak(yuiCssLineBreak);

        PrintWriter writer = null;
        try {
            String compressed = compressor.compress(baos.toString());
            response.setContentLength(compressed.length());
            writer = response.getWriter();
            writer.write(compressed);
        } finally {
            if (writer != null) {
                writer.close();
            }
        }
    } else if (baos != null) {
        ServletOutputStream outputStream = null;
        try {
            outputStream = response.getOutputStream();
            outputStream.write(baos.toByteArray());
        } finally {
            if (outputStream != null) {
                outputStream.close();
            }
        }
    }
}
From source file:ResourceServlet.java
public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    // get web.xml for display by a servlet
    String file = "/WEB-INF/web.xml";

    URL url = null;
    URLConnection urlConn = null;
    PrintWriter out = null;
    BufferedInputStream buf = null;

    try {
        out = response.getWriter();
        url = getServletContext().getResource(file);

        // set response header
        response.setContentType("text/xml");

        urlConn = url.openConnection();
        // establish connection with URL presenting web.xml
        urlConn.connect();
        buf = new BufferedInputStream(urlConn.getInputStream());

        int readBytes = 0;
        while ((readBytes = buf.read()) != -1)
            out.write(readBytes);
    } catch (MalformedURLException mue) {
        throw new ServletException(mue.getMessage());
    } catch (IOException ioe) {
        throw new ServletException(ioe.getMessage());
    } finally {
        if (out != null)
            out.close();
        if (buf != null)
            buf.close();
    }
}
From source file:au.org.ala.biocache.web.ExploreController.java
/**
 * Returns facet values that only occur in the supplied subQueryQid
 * and not in the parentQuery.
 *
 * The facet is defined in the parentQuery. Default facet is SearchDAOImpl.NAMES_AND_LSID
 *
 * If no requestParams defined the default q=geospatial_kosher:* is used.
 *
 * @return
 */
@RequestMapping(value = "/explore/endemic/species/{subQueryQid}.csv", method = RequestMethod.GET)
public void getSpeciesOnlyInOneQueryCSV(SpatialSearchRequestParams parentQuery,
        @PathVariable(value = "subQueryQid") Long subQueryQid, HttpServletResponse response) throws Exception {
    ParamsCacheObject qid = ParamsCache.getParamCacheObjectFromQuery("qid:" + subQueryQid);
    SpatialSearchRequestParams subQuery = new SpatialSearchRequestParams();
    subQuery.setQ(qid.getQ());
    subQuery.setFacets(qid.getFqs());
    subQuery.setWkt(qid.getWkt());

    if (parentQuery.getQ() == null) {
        parentQuery.setQ("geospatial_kosher:*");
    }
    if (parentQuery.getFacets() == null || parentQuery.getFacets().length == 0) {
        parentQuery.setFacets(new String[] { SearchDAOImpl.NAMES_AND_LSID });
    }

    if (subQuery != null) {
        if (parentQuery.getFacets() != null && parentQuery.getFacets().length == 1) {
            List<FieldResultDTO> list = searchDao.getSubquerySpeciesOnly(subQuery, parentQuery);

            response.setCharacterEncoding("UTF-8");
            response.setContentType("text/plain");
            java.io.PrintWriter writer = response.getWriter();
            writer.write("Family,Scientific name,Common name,Taxon rank,LSID,# Occurrences");
            for (FieldResultDTO item : list) {
                String s = item.getLabel();
                if (s.startsWith("\"") && s.endsWith("\"") && s.length() > 2)
                    s = s.substring(1, s.length() - 1);
                String[] values = s.split("\\|", 6);
                if (values.length >= 5) {
                    writer.write("\n" + values[4] + ",\""
                            + values[0].replace("\"", "\"\"").replace("\\", "\\\\") + "\","
                            + "\"" + values[2].replace("\"", "\"\"").replace("\\", "\\\\") + "\",,"
                            + values[1] + "," + item.getCount());
                }
            }
            writer.flush();
            writer.close();
        } else {
            response.sendError(HttpServletResponse.SC_BAD_REQUEST, "Please supply only one facet.");
        }
    } else {
        response.sendError(HttpServletResponse.SC_BAD_REQUEST, "Please supply a valid sub query qid.");
    }
}
From source file:net.nan21.dnet.core.web.controller.data.AbstractDsReadController.java
/**
 * Returns information about the given resource (data-source)
 *
 * @param resourceName
 * @param dataFormat
 * @param request
 * @param response
 * @return
 * @throws Exception
 */
@RequestMapping(params = Constants.REQUEST_PARAM_ACTION + "=" + Constants.DS_ACTION_INFO)
public String info(@PathVariable String resourceName, @PathVariable String dataFormat,
        HttpServletRequest request, HttpServletResponse response) throws Exception {
    try {
        StopWatch stopWatch = new StopWatch();
        stopWatch.start();
        if (logger.isInfoEnabled()) {
            logger.info("Processing request: {}.{} -> action = {} ",
                    new String[] { resourceName, dataFormat, Constants.DS_ACTION_INFO });
        }
        this.prepareRequest(request, response);

        @SuppressWarnings("unchecked")
        List<IDsDefinitions> defsList = (List<IDsDefinitions>) this.getApplicationContext()
                .getBean("osgiDsDefinitions");
        String out = null;

        for (IDsDefinitions defs : defsList) {
            if (defs.containsDs(resourceName)) {
                IDsDefinition def = defs.getDsDefinition(resourceName);
                ((DsDefinition) def).getModelFields();
                ((DsDefinition) def).getFilterFields();
                ((DsDefinition) def).getParamFields();

                IDsService<M, F, P> service = this.findDsService(resourceName);

                if (dataFormat.equals(IDsMarshaller.JSON)) {
                    IDsMarshaller<M, F, P> marshaller = service.createMarshaller(dataFormat);
                    response.setContentType("text/plain; charset=UTF-8");
                    out = ((ObjectMapper) marshaller.getDelegate()).writeValueAsString(def);
                    PrintWriter w = response.getWriter();
                    w.write(out);
                    w.flush();
                    return null;
                } else if (dataFormat.equals(IDsMarshaller.XML)) {
                    IDsMarshaller<M, F, P> marshaller = service.createMarshaller(dataFormat);
                    StringWriter writer = new StringWriter();
                    ((XmlMarshaller<M, F, P>) marshaller).createMarshaller(def.getClass()).marshal(def, writer);
                    response.setContentType("text/xml; charset=UTF-8");
                    out = writer.toString();
                    PrintWriter w = response.getWriter();
                    w.write(out);
                    w.flush();
                    return null;
                } else if (dataFormat.equals("html")) {
                    IDsMarshaller<M, F, P> marshaller = service.createMarshaller(IDsMarshaller.XML);
                    StringWriter writer = new StringWriter();
                    ((XmlMarshaller<M, F, P>) marshaller).createMarshaller(def.getClass()).marshal(def, writer);
                    out = writer.toString();
                    String t1 = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>";
                    String t2 = "<?xml-stylesheet type=\"text/xsl\" href=\"/nan21.dnet.core.web/webapp/resources/xsl/ds-info.xsl\"?>";
                    out = out.replace(t1, t1 + '\n' + t2);
                    response.setContentType("text/xml; charset=UTF-8");
                    PrintWriter w = response.getWriter();
                    w.write(out);
                    w.flush();
                    return null;
                }
            }
        }
        throw new Exception("Data-source " + resourceName + " cannot be found.");
    } catch (Exception e) {
        return this.handleException(e, response);
    } finally {
        this.finishRequest();
    }
}
From source file:com.openshift.internal.restclient.http.UrlConnectionHttpClient.java
protected String request(HttpMethod httpMethod, URL url, int timeout, IResource resource)
        throws SocketTimeoutException, HttpClientException {
    HttpURLConnection connection = null;
    try {
        connection = createConnection(url, userAgent, acceptedVersion, acceptedMediaType,
                sslAuthorizationCallback, timeout);
        if (httpMethod == HttpMethod.POST || httpMethod == HttpMethod.PUT) {
            setContentTypeHeader(acceptedVersion, acceptedMediaType, connection);
        }
        // PATCH not yet supported by JVM
        setRequestMethod(httpMethod, connection);
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug(String.format("Request Properties: %s", connection.getRequestProperties()));
            LOGGER.debug(String.format("Request Method: %s", connection.getRequestMethod()));
        }
        if (resource != null) {
            if (LOGGER.isDebugEnabled())
                LOGGER.debug(resource.toJson(false));
            connection.setDoOutput(true);
            PrintWriter writer = new PrintWriter(connection.getOutputStream());
            writer.write(resource.toString());
            writer.flush();
        }
        return IOUtils.toString(connection.getInputStream(), "UTF-8");
    } catch (SocketTimeoutException e) {
        throw e;
    } catch (IOException e) {
        throw createException(e, connection);
    } finally {
        disconnect(connection);
    }
}
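A side note on the example above, not part of the original source: new PrintWriter(connection.getOutputStream()) encodes characters with the platform default charset, while the response is read back as UTF-8. A small sketch of a variant that pins the request encoding explicitly:

    // Sketch only: write the request body as UTF-8 instead of the platform default charset.
    PrintWriter writer = new PrintWriter(
            new OutputStreamWriter(connection.getOutputStream(), java.nio.charset.StandardCharsets.UTF_8));
    writer.write(resource.toString());
    writer.flush();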
From source file:com.ephesoft.gxt.admin.server.ImportDocumentTypeUploadServlet.java
/**
 * This API is used to process the attached file.
 *
 * @param req {@link HttpServletRequest}.
 * @param resp {@link HttpServletResponse}.
 * @param bSService {@link BatchSchemaService}.
 * @return
 */
private void attachFile(final HttpServletRequest req, final HttpServletResponse resp,
        final BatchSchemaService bSService) throws IOException {
    final PrintWriter printWriter = resp.getWriter();
    String tempZipFile = null;
    if (ServletFileUpload.isMultipartContent(req)) {
        final String serDirPath = bSService.getBatchExportFolderLocation();
        final ServletFileUpload upload = getUploadedFile(serDirPath);
        tempZipFile = processUploadedFile(upload, req, printWriter, serDirPath);
        printWriter.write(tempZipFile);
    } else {
        log.error("Request contents type is not supported.");
        printWriter.write("Request contents type is not supported.");
    }
    printWriter.flush();
}