List of usage examples for javax.servlet.http HttpServletRequest getContentType
public String getContentType();
Returns the MIME type of the body of the request, or null if the type is not known.
From source file: org.eclipse.rdf4j.http.server.repository.statements.StatementsController.java
private ModelAndView getSparqlUpdateResult(Repository repository, HttpServletRequest request, HttpServletResponse response) throws ServerHTTPException, ClientHTTPException, HTTPException { ProtocolUtil.logRequestParameters(request); String mimeType = HttpServerUtil.getMIMEType(request.getContentType()); String sparqlUpdateString;//ww w .java 2s .co m if (Protocol.SPARQL_UPDATE_MIME_TYPE.equals(mimeType)) { // The query should be the entire body try { sparqlUpdateString = IOUtils.toString(request.getReader()); } catch (IOException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Error reading request message body", e); } if (sparqlUpdateString.isEmpty()) sparqlUpdateString = null; } else { sparqlUpdateString = request.getParameterValues(Protocol.UPDATE_PARAM_NAME)[0]; } // default query language is SPARQL QueryLanguage queryLn = QueryLanguage.SPARQL; String queryLnStr = request.getParameter(QUERY_LANGUAGE_PARAM_NAME); logger.debug("query language param = {}", queryLnStr); if (queryLnStr != null) { queryLn = QueryLanguage.valueOf(queryLnStr); if (queryLn == null) { throw new ClientHTTPException(SC_BAD_REQUEST, "Unknown query language: " + queryLnStr); } } String baseURI = request.getParameter(Protocol.BASEURI_PARAM_NAME); // determine if inferred triples should be included in query evaluation boolean includeInferred = ProtocolUtil.parseBooleanParam(request, INCLUDE_INFERRED_PARAM_NAME, true); // build a dataset, if specified String[] defaultRemoveGraphURIs = request.getParameterValues(REMOVE_GRAPH_PARAM_NAME); String[] defaultInsertGraphURIs = request.getParameterValues(INSERT_GRAPH_PARAM_NAME); String[] defaultGraphURIs = request.getParameterValues(USING_GRAPH_PARAM_NAME); String[] namedGraphURIs = request.getParameterValues(USING_NAMED_GRAPH_PARAM_NAME); SimpleDataset dataset = null; if (defaultRemoveGraphURIs != null || defaultInsertGraphURIs != null || defaultGraphURIs != null || namedGraphURIs != null) { dataset = new SimpleDataset(); } if 
(defaultRemoveGraphURIs != null) { for (String graphURI : defaultRemoveGraphURIs) { try { IRI uri = createURIOrNull(repository, graphURI); dataset.addDefaultRemoveGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default remove graph: " + graphURI); } } } if (defaultInsertGraphURIs != null && defaultInsertGraphURIs.length > 0) { String graphURI = defaultInsertGraphURIs[0]; try { IRI uri = createURIOrNull(repository, graphURI); dataset.setDefaultInsertGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default insert graph: " + graphURI); } } if (defaultGraphURIs != null) { for (String defaultGraphURI : defaultGraphURIs) { try { IRI uri = createURIOrNull(repository, defaultGraphURI); dataset.addDefaultGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default graph: " + defaultGraphURI); } } } if (namedGraphURIs != null) { for (String namedGraphURI : namedGraphURIs) { try { IRI uri = createURIOrNull(repository, namedGraphURI); dataset.addNamedGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for named graph: " + namedGraphURI); } } } final int maxQueryTime = ProtocolUtil.parseTimeoutParam(request); try (RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request)) { Update update = repositoryCon.prepareUpdate(queryLn, sparqlUpdateString, baseURI); update.setIncludeInferred(includeInferred); update.setMaxExecutionTime(maxQueryTime); if (dataset != null) { update.setDataset(dataset); } // determine if any variable bindings have been set on this // update. 
@SuppressWarnings("unchecked") Enumeration<String> parameterNames = request.getParameterNames(); while (parameterNames.hasMoreElements()) { String parameterName = parameterNames.nextElement(); if (parameterName.startsWith(BINDING_PREFIX) && parameterName.length() > BINDING_PREFIX.length()) { String bindingName = parameterName.substring(BINDING_PREFIX.length()); Value bindingValue = ProtocolUtil.parseValueParam(request, parameterName, repository.getValueFactory()); update.setBinding(bindingName, bindingValue); } } update.execute(); return new ModelAndView(EmptySuccessView.getInstance()); } catch (QueryInterruptedException e) { throw new ServerHTTPException(SC_SERVICE_UNAVAILABLE, "update execution took too long"); } catch (UpdateExecutionException e) { if (e.getCause() != null && e.getCause() instanceof HTTPException) { // custom signal from the backend, throw as HTTPException // directly // (see SES-1016). throw (HTTPException) e.getCause(); } else { throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); } } catch (RepositoryException e) { if (e.getCause() != null && e.getCause() instanceof HTTPException) { // custom signal from the backend, throw as HTTPException // directly // (see SES-1016). throw (HTTPException) e.getCause(); } else { throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); } } catch (MalformedQueryException e) { ErrorInfo errInfo = new ErrorInfo(ErrorType.MALFORMED_QUERY, e.getMessage()); throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString()); } }
From source file:org.openrdf.http.server.repository.statements.StatementsController.java
private ModelAndView getSparqlUpdateResult(Repository repository, HttpServletRequest request, HttpServletResponse response) throws ServerHTTPException, ClientHTTPException, HTTPException { ProtocolUtil.logRequestParameters(request); String mimeType = HttpServerUtil.getMIMEType(request.getContentType()); String sparqlUpdateString;/*from w w w . j av a2 s . c o m*/ if (Protocol.SPARQL_UPDATE_MIME_TYPE.equals(mimeType)) { // The query should be the entire body try { sparqlUpdateString = IOUtils.toString(request.getReader()); } catch (IOException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Error reading request message body", e); } if (sparqlUpdateString.isEmpty()) sparqlUpdateString = null; } else { sparqlUpdateString = request.getParameterValues(Protocol.UPDATE_PARAM_NAME)[0]; } // default query language is SPARQL QueryLanguage queryLn = QueryLanguage.SPARQL; String queryLnStr = request.getParameter(QUERY_LANGUAGE_PARAM_NAME); logger.debug("query language param = {}", queryLnStr); if (queryLnStr != null) { queryLn = QueryLanguage.valueOf(queryLnStr); if (queryLn == null) { throw new ClientHTTPException(SC_BAD_REQUEST, "Unknown query language: " + queryLnStr); } } String baseURI = request.getParameter(Protocol.BASEURI_PARAM_NAME); // determine if inferred triples should be included in query evaluation boolean includeInferred = ProtocolUtil.parseBooleanParam(request, INCLUDE_INFERRED_PARAM_NAME, true); // build a dataset, if specified String[] defaultRemoveGraphURIs = request.getParameterValues(REMOVE_GRAPH_PARAM_NAME); String[] defaultInsertGraphURIs = request.getParameterValues(INSERT_GRAPH_PARAM_NAME); String[] defaultGraphURIs = request.getParameterValues(USING_GRAPH_PARAM_NAME); String[] namedGraphURIs = request.getParameterValues(USING_NAMED_GRAPH_PARAM_NAME); SimpleDataset dataset = new SimpleDataset(); if (defaultRemoveGraphURIs != null) { for (String graphURI : defaultRemoveGraphURIs) { try { IRI uri = createURIOrNull(repository, graphURI); 
dataset.addDefaultRemoveGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default remove graph: " + graphURI); } } } if (defaultInsertGraphURIs != null && defaultInsertGraphURIs.length > 0) { String graphURI = defaultInsertGraphURIs[0]; try { IRI uri = createURIOrNull(repository, graphURI); dataset.setDefaultInsertGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default insert graph: " + graphURI); } } if (defaultGraphURIs != null) { for (String defaultGraphURI : defaultGraphURIs) { try { IRI uri = createURIOrNull(repository, defaultGraphURI); dataset.addDefaultGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default graph: " + defaultGraphURI); } } } if (namedGraphURIs != null) { for (String namedGraphURI : namedGraphURIs) { try { IRI uri = createURIOrNull(repository, namedGraphURI); dataset.addNamedGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for named graph: " + namedGraphURI); } } } try { RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request); synchronized (repositoryCon) { Update update = repositoryCon.prepareUpdate(queryLn, sparqlUpdateString, baseURI); update.setIncludeInferred(includeInferred); if (dataset != null) { update.setDataset(dataset); } // determine if any variable bindings have been set on this update. 
@SuppressWarnings("unchecked") Enumeration<String> parameterNames = request.getParameterNames(); while (parameterNames.hasMoreElements()) { String parameterName = parameterNames.nextElement(); if (parameterName.startsWith(BINDING_PREFIX) && parameterName.length() > BINDING_PREFIX.length()) { String bindingName = parameterName.substring(BINDING_PREFIX.length()); Value bindingValue = ProtocolUtil.parseValueParam(request, parameterName, repository.getValueFactory()); update.setBinding(bindingName, bindingValue); } } update.execute(); } return new ModelAndView(EmptySuccessView.getInstance()); } catch (UpdateExecutionException e) { if (e.getCause() != null && e.getCause() instanceof HTTPException) { // custom signal from the backend, throw as HTTPException directly // (see SES-1016). throw (HTTPException) e.getCause(); } else { throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); } } catch (RepositoryException e) { if (e.getCause() != null && e.getCause() instanceof HTTPException) { // custom signal from the backend, throw as HTTPException directly // (see SES-1016). throw (HTTPException) e.getCause(); } else { throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); } } catch (MalformedQueryException e) { ErrorInfo errInfo = new ErrorInfo(ErrorType.MALFORMED_QUERY, e.getMessage()); throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString()); } }
From source file:org.apache.solr.servlet.SolrRequestParserTest.java
License: not specified in the original source
public void doAutoDetect(String userAgent, String method, final String body, String expectedContentType, String expectedKey, String expectedValue) throws Exception { String uri = "/solr/select"; String contentType = "application/x-www-form-urlencoded"; int contentLength = -1; // does this mean auto-detect? HttpServletRequest request = createMock(HttpServletRequest.class); expect(request.getHeader("User-Agent")).andReturn(userAgent).anyTimes(); expect(request.getHeader("Content-Length")).andReturn(null).anyTimes(); expect(request.getRequestURI()).andReturn(uri).anyTimes(); expect(request.getContentType()).andReturn(contentType).anyTimes(); expect(request.getContentLength()).andReturn(contentLength).anyTimes(); expect(request.getAttribute(SolrRequestParsers.REQUEST_TIMER_SERVLET_ATTRIBUTE)).andReturn(null).anyTimes(); expect(request.getMethod()).andReturn(method).anyTimes(); // we dont pass a content-length to let the security mechanism limit it: expect(request.getQueryString()).andReturn("foo=1&bar=2").anyTimes(); expect(request.getInputStream())/*from w ww . j ava2 s .c o m*/ .andReturn(new ByteServletInputStream(body.getBytes(StandardCharsets.US_ASCII))); expect(request.getAttribute(EasyMock.anyObject(String.class))).andReturn(null).anyTimes(); replay(request); SolrRequestParsers parsers = new SolrRequestParsers(h.getCore().getSolrConfig()); SolrQueryRequest req = parsers.parse(h.getCore(), "/select", request); int num = 0; if (expectedContentType != null) { for (ContentStream cs : req.getContentStreams()) { num++; assertTrue(cs.getContentType().startsWith(expectedContentType)); String returnedBody = IOUtils.toString(cs.getReader()); assertEquals(body, returnedBody); } assertEquals(1, num); } assertEquals("1", req.getParams().get("foo")); assertEquals("2", req.getParams().get("bar")); if (expectedKey != null) { assertEquals(expectedValue, req.getParams().get(expectedKey)); } req.close(); }
From source file:com.streamsets.pipeline.stage.origin.sdcipctokafka.IpcToKafkaServlet.java
@Override protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { String requestor = req.getRemoteAddr() + ":" + req.getRemotePort(); if (shuttingDown) { LOG.debug("Shutting down, discarding incoming request from '{}'", requestor); resp.setStatus(HttpServletResponse.SC_GONE); } else {/*from w w w . j a v a 2 s. c o m*/ String appId = req.getHeader(Constants.X_SDC_APPLICATION_ID_HEADER); String compression = req.getHeader(Constants.X_SDC_COMPRESSION_HEADER); String contentType = req.getContentType(); String json1Fragmentable = req.getHeader(Constants.X_SDC_JSON1_FRAGMENTABLE_HEADER); if (!Constants.APPLICATION_BINARY.equals(contentType)) { invalidRequestMeter.mark(); resp.sendError(HttpServletResponse.SC_BAD_REQUEST, Utils.format( "Wrong content-type '{}', expected '{}'", contentType, Constants.APPLICATION_BINARY)); } else if (!"true".equals(json1Fragmentable)) { invalidRequestMeter.mark(); resp.sendError(HttpServletResponse.SC_BAD_REQUEST, Utils.format( "RPC client is not using a fragmentable JSON1 encoding, client;s SDC must be upgraded")); } else if (!configs.appId.equals(appId)) { invalidRequestMeter.mark(); LOG.warn("IPC from '{}' invalid appId '{}', rejected", requestor, appId); resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Invalid 'appId'"); } else { long start = System.currentTimeMillis(); LOG.debug("Request accepted from '{}'", requestor); try (InputStream in = req.getInputStream()) { InputStream is = in; boolean processRequest = true; if (compression != null) { switch (compression) { case Constants.SNAPPY_COMPRESSION: is = new SnappyFramedInputStream(is, true); break; default: invalidRequestMeter.mark(); LOG.warn("Invalid compression '{}' in request from '{}', returning error", compression, requestor); resp.sendError(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE, "Unsupported compression: " + compression); processRequest = false; } } if (processRequest) { LOG.debug("Processing request from '{}'", 
requestor); List<byte[]> messages = SdcStreamFragmenter.fragment(is, maxMessageSize, maxRpcRequestSize); LOG.debug("Request from '{}' broken into '{}' messages", requestor, messages.size()); long kStart = System.currentTimeMillis(); SdcKafkaProducer producer = getKafkaProducer(); long kafkaTime = System.currentTimeMillis() - kStart; try { for (byte[] message : messages) { // we are using round robing partition strategy, partition key is ignored kStart = System.currentTimeMillis(); producer.enqueueMessage(configs.topic, message, ""); kafkaTime += System.currentTimeMillis() - kStart; } kStart = System.currentTimeMillis(); producer.write(); kafkaTime += System.currentTimeMillis() - kStart; resp.setStatus(HttpServletResponse.SC_OK); requestMeter.mark(); } catch (StageException ex) { LOG.warn("Kakfa producer error: {}", ex.toString(), ex); errorQueue.offer(ex); errorRequestMeter.mark(); LOG.warn("Error while reading payload from '{}': {}", requestor, ex.toString(), ex); resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, ex.toString()); } finally { kStart = System.currentTimeMillis(); releaseKafkaProducer(producer); kafkaTime += System.currentTimeMillis() - kStart; } kafkaTimer.update(kafkaTime, TimeUnit.MILLISECONDS); kafkaMessagesMeter.mark(messages.size()); } } catch (Exception ex) { errorRequestMeter.mark(); LOG.warn("Error while reading payload from '{}': {}", requestor, ex.toString(), ex); resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, ex.toString()); } finally { requestTimer.update(System.currentTimeMillis() - start, TimeUnit.MILLISECONDS); } } } }
From source file:org.codehaus.groovy.grails.web.servlet.mvc.GrailsParameterMap.java
/**
 * Creates a GrailsParameterMap populated from the given request object.
 * <p>
 * For a PUT request with an empty parameter map, the request body is parsed
 * manually because some containers don't parse the request body automatically
 * for PUT requests. Multipart files, when present, are added to the map as well.
 *
 * @param request The request object backing this parameter map
 */
public GrailsParameterMap(HttpServletRequest request) {
    this.request = request;
    final Map requestMap = new LinkedHashMap(request.getParameterMap());
    if (requestMap.isEmpty() && "PUT".equals(request.getMethod())
            && request.getAttribute(REQUEST_BODY_PARSED) == null) {
        // Attempt manual parse of request body. This is here because some containers
        // don't parse the request body automatically for PUT requests.
        String contentType = request.getContentType();
        // FIX: match on the media-type prefix so content types that carry a charset
        // parameter (e.g. "application/x-www-form-urlencoded; charset=UTF-8") are
        // also recognized as form posts; the previous equals() comparison rejected them.
        if (contentType != null && contentType.startsWith("application/x-www-form-urlencoded")) {
            try {
                String contents = IOUtils.toString(request.getReader());
                request.setAttribute(REQUEST_BODY_PARSED, true);
                requestMap.putAll(WebUtils.fromQueryString(contents));
            } catch (Exception e) {
                LOG.error("Error processing form encoded PUT request", e);
            }
        }
    }
    if (request instanceof MultipartHttpServletRequest) {
        Map<String, MultipartFile> fileMap = ((MultipartHttpServletRequest) request).getFileMap();
        for (Map.Entry<String, MultipartFile> entry : fileMap.entrySet()) {
            requestMap.put(entry.getKey(), entry.getValue());
        }
    }
    updateNestedKeys(requestMap);
}
From source file:org.osaf.cosmo.mc.MorseCodeServlet.java
/**
 * Checks the preconditions for a write request: a positive content length,
 * an EIMML media type, and none of the unsupported Content-* headers.
 * On failure the appropriate error status is set on the response.
 *
 * @param req the incoming write request
 * @param resp the response on which an error status is set when a check fails
 * @return true when all preconditions hold, false otherwise
 */
private boolean checkWritePreconditions(HttpServletRequest req, HttpServletResponse resp) {
    if (req.getContentLength() <= 0) {
        resp.setStatus(HttpServletResponse.SC_LENGTH_REQUIRED);
        return false;
    }

    String contentType = req.getContentType();
    if (contentType == null || !contentType.startsWith(MEDIA_TYPE_EIMML)) {
        resp.setStatus(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
        return false;
    }

    // Headers describing transformed or partial bodies are not supported.
    String[] unsupportedHeaders = { "Content-Transfer-Encoding", "Content-Encoding", "Content-Base",
            "Content-Location", "Content-MD5", "Content-Range" };
    for (String header : unsupportedHeaders) {
        if (req.getHeader(header) != null) {
            resp.setStatus(HttpServletResponse.SC_NOT_IMPLEMENTED);
            return false;
        }
    }

    return true;
}
From source file:admin.controller.ServletEditPersonality.java
/**
 * Processes requests for both HTTP <code>GET</code> and <code>POST</code> methods.
 * Handles the "edit brand personality" multipart form: reads the form fields,
 * replaces the previously stored brand image on disk and updates the brand record.
 *
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
@Override
public void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    super.processRequest(request, response);
    response.setContentType("text/html;charset=UTF-8");
    PrintWriter out = response.getWriter();
    int maxFileSize = 5000 * 1024; // maximum file size to be uploaded
    int maxMemSize = 5000 * 1024;  // maximum size that will be kept in memory
    try {
        uploadPath = AppConstants.BRAND_IMAGES_HOME;
        deletePath = AppConstants.BRAND_IMAGES_HOME;

        // Verify the content type. FIX: getContentType() may return null (no body),
        // which previously caused a NullPointerException on indexOf(); a null type
        // now falls through to the "No file uploaded" page.
        String contentType = request.getContentType();
        if (contentType != null && contentType.contains("multipart/form-data")) {
            DiskFileItemFactory factory = new DiskFileItemFactory();
            // Items above this threshold are spooled to disk instead of memory.
            factory.setSizeThreshold(maxMemSize);
            // Location to save data that is larger than maxMemSize.
            factory.setRepository(new File(AppConstants.TMP_FOLDER));

            // Create a new file upload handler with an upload size cap.
            ServletFileUpload upload = new ServletFileUpload(factory);
            upload.setSizeMax(maxFileSize);

            // Parse the request to get the file items.
            List fileItems = upload.parseRequest(request);
            Iterator i = fileItems.iterator();

            out.println("<html>");
            out.println("<head>");
            out.println("<title>JSP File upload</title>");
            out.println("</head>");
            out.println("<body>");
            while (i.hasNext()) {
                FileItem fi = (FileItem) i.next();
                if (fi.isFormField()) {
                    // Plain form fields carry the brand metadata.
                    fieldName = fi.getFieldName();
                    if (fieldName.equals("brandname")) {
                        brandname = fi.getString();
                    }
                    if (fieldName.equals("brandid")) {
                        brandid = fi.getString();
                    }
                    if (fieldName.equals("look")) {
                        lookid = fi.getString();
                    }
                    file_name_to_delete = brand.getFileName(Integer.parseInt(brandid));
                } else {
                    // File field: store the new image and delete the old one.
                    fieldName = fi.getFieldName();
                    fileName = fi.getName();
                    File uploadDir = new File(uploadPath);
                    if (!uploadDir.exists()) {
                        uploadDir.mkdirs();
                    }
                    fileName = brandname + "_" + fileName;
                    String file_path = uploadPath + File.separator + fileName;
                    String delete_path = deletePath + File.separator + file_name_to_delete;
                    File deleteFile = new File(delete_path);
                    deleteFile.delete();
                    File storeFile = new File(file_path);
                    fi.write(storeFile);
                    // NOTE(review): this prints the field "filePath", not the freshly
                    // computed file_path/fileName — verify which value was intended.
                    out.println("Uploaded Filename: " + filePath + "<br>");
                }
            }
            brand.editBrands(Integer.parseInt(brandid), brandname, Integer.parseInt(lookid), fileName);
            response.sendRedirect(request.getContextPath() + "/admin/brandpersonality.jsp");
            out.println("</body>");
            out.println("</html>");
        } else {
            out.println("<html>");
            out.println("<head>");
            out.println("<title>Servlet upload</title>");
            out.println("</head>");
            out.println("<body>");
            out.println("<p>No file uploaded</p>");
            out.println("</body>");
            out.println("</html>");
        }
    } catch (Exception ex) {
        logger.log(Level.SEVERE, "", ex);
    } finally {
        out.close();
    }
}
From source file:com.streamsets.pipeline.stage.origin.ipctokafka.IpcToKafkaServlet.java
@Override protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { String requestor = req.getRemoteAddr() + ":" + req.getRemotePort(); if (shuttingDown) { LOG.debug("Shutting down, discarding incoming request from '{}'", requestor); resp.setStatus(HttpServletResponse.SC_GONE); } else {/*from ww w . j a v a 2 s . c o m*/ String appId = req.getHeader(Constants.X_SDC_APPLICATION_ID_HEADER); String compression = req.getHeader(Constants.X_SDC_COMPRESSION_HEADER); String contentType = req.getContentType(); String json1Fragmentable = req.getHeader(Constants.X_SDC_JSON1_FRAGMENTABLE_HEADER); if (!Constants.APPLICATION_BINARY.equals(contentType)) { invalidRequestMeter.mark(); resp.sendError(HttpServletResponse.SC_BAD_REQUEST, Utils.format( "Wrong content-type '{}', expected '{}'", contentType, Constants.APPLICATION_BINARY)); } else if (!"true".equals(json1Fragmentable)) { invalidRequestMeter.mark(); resp.sendError(HttpServletResponse.SC_BAD_REQUEST, Utils.format( "RPC client is not using a fragmentable JSON1 encoding, client;s SDC must be upgraded")); } else if (!configs.appId.equals(appId)) { invalidRequestMeter.mark(); LOG.warn("IPC from '{}' invalid appId '{}', rejected", requestor, appId); resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Invalid 'appId'"); } else { long start = System.currentTimeMillis(); LOG.debug("Request accepted from '{}'", requestor); try (InputStream in = req.getInputStream()) { InputStream is = in; boolean processRequest = true; if (compression != null) { switch (compression) { case Constants.SNAPPY_COMPRESSION: is = new SnappyFramedInputStream(is, true); break; default: invalidRequestMeter.mark(); LOG.warn("Invalid compression '{}' in request from '{}', returning error", compression, requestor); resp.sendError(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE, "Unsupported compression: " + compression); processRequest = false; } } if (processRequest) { LOG.debug("Processing request from '{}'", 
requestor); List<byte[]> messages = SdcStreamFragmenter.fragment(is, maxMessageSize, maxRpcRequestSize); LOG.debug("Request from '{}' broken into '{}' messages", requestor, messages.size()); long kStart = System.currentTimeMillis(); SdcKafkaProducer producer = getKafkaProducer(); long kafkaTime = System.currentTimeMillis() - kStart; try { for (byte[] message : messages) { // we are using round robing partition strategy, partition key is ignored kStart = System.currentTimeMillis(); producer.enqueueMessage(kafkaConfigBean.kafkaConfig.topic, message, ""); kafkaTime += System.currentTimeMillis() - kStart; } kStart = System.currentTimeMillis(); producer.write(); kafkaTime += System.currentTimeMillis() - kStart; resp.setStatus(HttpServletResponse.SC_OK); requestMeter.mark(); } catch (StageException ex) { LOG.warn("Kakfa producer error: {}", ex.toString(), ex); errorQueue.offer(ex); errorRequestMeter.mark(); LOG.warn("Error while reading payload from '{}': {}", requestor, ex.toString(), ex); resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, ex.toString()); } finally { kStart = System.currentTimeMillis(); releaseKafkaProducer(producer); kafkaTime += System.currentTimeMillis() - kStart; } kafkaTimer.update(kafkaTime, TimeUnit.MILLISECONDS); kafkaMessagesMeter.mark(messages.size()); } } catch (Exception ex) { errorRequestMeter.mark(); LOG.warn("Error while reading payload from '{}': {}", requestor, ex.toString(), ex); resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, ex.toString()); } finally { requestTimer.update(System.currentTimeMillis() - start, TimeUnit.MILLISECONDS); } } } }
From source file:org.codeartisans.proxilet.Proxilet.java
/** * Performs an HTTP POST request./*from w ww. ja va 2s . c o m*/ * * @param httpServletRequest The {@link HttpServletRequest} object passed in by the servlet engine representing the client request to be proxied * @param httpServletResponse The {@link HttpServletResponse} object by which we can send a proxied response to the client */ @Override public void doPost(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws IOException, ServletException { // Create a standard POST request String contentType = httpServletRequest.getContentType(); String destinationUrl = this.getProxyURL(httpServletRequest); LOGGER.trace("POST {} => {} ({})" + httpServletRequest.getRequestURL(), destinationUrl, contentType); PostMethod postMethodProxyRequest = new PostMethod(destinationUrl); // Forward the request headers setProxyRequestHeaders(httpServletRequest, postMethodProxyRequest); // Check if this is a mulitpart (file upload) POST if (ServletFileUpload.isMultipartContent(httpServletRequest)) { this.handleMultipartPost(postMethodProxyRequest, httpServletRequest); } else { if (contentType == null || PostMethod.FORM_URL_ENCODED_CONTENT_TYPE.equals(contentType)) { this.handleStandardPost(postMethodProxyRequest, httpServletRequest); } else { this.handleContentPost(postMethodProxyRequest, httpServletRequest); } } // Execute the proxy request this.executeProxyRequest(postMethodProxyRequest, httpServletRequest, httpServletResponse); }
From source file:Package.Projectoverviewservlet.java
@Override protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { frame.setAlwaysOnTop(true);/* ww w.jav a 2 s. c o m*/ if (request.getParameter("submit") != null) { Database database = null; try { database = new Database(); database.Connect(); } catch (SQLException | ClassNotFoundException ex) { Logger.getLogger(Projectoverviewservlet.class.getName()).log(Level.SEVERE, null, ex); } try { String contentType = request.getContentType(); if ((contentType.indexOf("multipart/form-data") >= 0)) { try { Part filePart = request.getPart("fileupload"); Image image = ImageIO.read(filePart.getInputStream()); String INSERT_PICTURE = "INSERT INTO \"PICTURE\"(\"PICTUREID\", \"PROJECTID\", \"HEIGHT\", \"WIDTH\", \"COLORTYPE\", \"PICTURE\") VALUES (PictureSequence.nextval, 1," + image.getHeight(null) + "," + image.getWidth(null) + ", 'Color', ?)"; InputStream is = null; PreparedStatement ps = null; try { is = filePart.getInputStream(); ps = database.myConn.prepareStatement(INSERT_PICTURE); ps.setBlob(1, is); ps.executeUpdate(); database.myConn.commit(); JOptionPane.showMessageDialog(frame, "De afbeelding is succesvol geupload."); } finally { try { ps.close(); is.close(); } catch (Exception exception) { } } } catch (IOException | ServletException | SQLException ex) { System.out.println(ex); } } else { JOptionPane.showMessageDialog(frame, "Er is iets fout gegaan probeer het opnieuw"); } } catch (Exception ex) { JOptionPane.showMessageDialog(frame, ex.getMessage()); } response.sendRedirect("projectoverview.jsp"); } if (request.getParameter("deleteproject") != null) { String[] selectresults = request.getParameterValues("selectproject"); po.deleteProject(Integer.parseInt(selectresults[0])); } if (request.getParameter("openproject") != null) { try { String[] selectresults = request.getParameterValues("selectproject"); Project project = po.getProject(Integer.parseInt(selectresults[0])); request.setAttribute("project", 
project); request.getRequestDispatcher("projectoverview.jsp").forward(request, response); } catch (Exception ex) { JOptionPane.showMessageDialog(frame, ex.getMessage()); } } if (request.getParameter("Save") != null) { Project project = (Project) request.getAttribute("project"); if (project != null) { try { System.out.println(request.getParameter("startdate")); po.updateProject(project.getProjectID(), request.getParameter("name"), request.getParameter("client"), new SimpleDateFormat("yyyy-MM-dd").parse(request.getParameter("startdate")), new SimpleDateFormat("yyyy-MM-dd").parse(request.getParameter("enddate"))); } catch (Exception ex) { } } else { String username = ""; for (Cookie cookie : request.getCookies()) { if (cookie.getName().equals("Email")) { username = cookie.getValue(); } } if (!username.isEmpty()) { try { po.createProject(po.connection.getCompanyID(username), request.getParameter("name"), request.getParameter("client"), new SimpleDateFormat("yyyy-MM-dd").parse(request.getParameter("startdate")), new SimpleDateFormat("yyyy-MM-dd").parse(request.getParameter("enddate"))); } catch (ParseException ex) { Logger.getLogger(Projectoverviewservlet.class.getName()).log(Level.SEVERE, null, ex); } } //roep create aan } request.getRequestDispatcher("projectoverview.jsp").forward(request, response); } if (request.getParameter("deleteimage") != null) { } if (request.getParameter("importimage") != null) { } if (request.getParameter("koppel") != null) { } if (request.getParameter("addemail") != null) { } if (request.getParameter("deleteemail") != null) { } if (request.getParameter("importemail") != null) { } }