List of usage examples for javax.servlet.http.HttpServletRequest.getHeaderNames()
public Enumeration<String> getHeaderNames();
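Before the larger examples below, here is a minimal, self-contained sketch of the usual pattern: iterate the Enumeration returned by getHeaderNames() and look up each value with getHeader(). The servlet class name and the plain-text output are illustrative assumptions, not taken from any of the examples on this page. Note that the servlet specification allows a container to return null from getHeaderNames() when header access is not permitted, so the null check is worth keeping.

import java.io.IOException;
import java.util.Enumeration;

import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

// Minimal sketch (hypothetical servlet): dump every request header name/value pair.
public class HeaderDumpServlet extends HttpServlet {

    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws IOException {
        response.setContentType("text/plain");
        // getHeaderNames() may return null if the container disallows header access.
        Enumeration<String> headerNames = request.getHeaderNames();
        if (headerNames != null) {
            while (headerNames.hasMoreElements()) {
                String name = headerNames.nextElement();
                // getHeader(name) returns only the first value of a multi-valued header.
                response.getWriter().println(name + ": " + request.getHeader(name));
            }
        }
    }
}

For multi-valued headers, request.getHeaders(name) returns every value; the DSpace example further down uses that variant.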
From source file:org.metis.pull.WdsResourceBean.java
/**
 * This method gets called by the WdsRdbMapper bean to handle an HTTP request. This
 * method must be multi-thread capable. Note that since we're not using Views, this
 * method must return null.
 *
 * @param request  the http request that is being serviced
 * @param response the response that will be sent back to the service consumer
 * @return must return null since we're not using a view
 * @throws Exception
 */
@SuppressWarnings("unchecked")
protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response)
        throws Exception {

    LOG.debug(getBeanName() + ": handleRequestInternal - **** new request ****");

    // dump the request if trace is on
    if (LOG.isTraceEnabled()) {
        LOG.trace(getBeanName() + ":handleRequestInternal - method = " + request.getMethod());
        LOG.trace(getBeanName() + ":handleRequestInternal - uri = " + request.getRequestURI());
        LOG.trace(getBeanName() + ":handleRequestInternal - protocol = " + request.getProtocol());
        LOG.trace(getBeanName() + ":handleRequestInternal - secure = " + request.isSecure());
        // dump all the http headers and their values
        Enumeration<String> headerNames = request.getHeaderNames();
        if (headerNames != null) {
            while (headerNames.hasMoreElements()) {
                String headerName = headerNames.nextElement();
                LOG.trace(getBeanName() + ":handleRequestInternal - " + headerName + " = "
                        + request.getHeader(headerName));
            }
        }
        if (request.getQueryString() != null) {
            LOG.trace(getBeanName() + ":handleRequestInternal - queryString = " + request.getQueryString());
        }
    }

    long currentTime = System.currentTimeMillis();

    // give the response a Date header with the current time
    response.setDateHeader(DATE_HDR, currentTime);

    // assign the Server header this container's info
    response.setHeader(SERVER_HDR, getServerInfo());

    // determine the HTTP protocol version being used by the client;
    // the default version will be 0
    int protocolVersion = 0;
    try {
        protocolVersion = Integer
                .parseInt(request.getProtocol().split(FORWARD_SLASH_STR)[1].split(ESC_DOT_STR)[1]);
    } catch (Exception exc) {
        LOG.warn(getBeanName() + ": handleRequestInternal - unable to get http protocol version, "
                + "stack trace follows: ");
        LOG.error(getBeanName() + ": exception stack trace follows:");
        dumpStackTrace(exc.getStackTrace());
    }
    LOG.trace(getBeanName() + ":handleRequestInternal - using this protocol version: " + protocolVersion);

    /*
     * Ok, the request first needs to run the security gauntlet.
     *
     * We do not want to send any error messages back to the client that would give it
     * a hint that we're invoking SQL statements. This is a countermeasure for SQL
     * injection probes.
     */

    // see if this RDB is restricting user agents and if so, validate the user agent
    if ((getAllowedAgents() != null && !getAllowedAgents().isEmpty())
            || (getNotAllowedAgents() != null && !getNotAllowedAgents().isEmpty())) {

        String userAgent = request.getHeader(USER_AGENT_HDR);

        if (userAgent != null && userAgent.length() > 0) {
            LOG.debug(getBeanName() + ": handleRequestInternal - validating this user agent: " + userAgent);

            // Convert to lower case as allowed agents have been converted to lower case as well
            userAgent = userAgent.toLowerCase();

            boolean allow = false;
            if (getAllowedAgents() != null && !getAllowedAgents().isEmpty()) {
                for (String agent : getAllowedAgents()) {
                    LOG.trace(getBeanName() + ": handleRequestInternal - comparing to this allowed agent : "
                            + agent);
                    if (userAgent.indexOf(agent) >= 0) {
                        LOG.trace(getBeanName() + ": handleRequestInternal - this allowed agent was found: "
                                + agent);
                        allow = true;
                        break;
                    }
                }
            } else {
                allow = true;
                for (String agent : getNotAllowedAgents()) {
                    LOG.trace(getBeanName() + ": handleRequestInternal - comparing to this non-allowed agent : "
                            + agent);
                    if (userAgent.indexOf(agent) >= 0) {
                        LOG.trace(getBeanName() + ": handleRequestInternal - this non-allowed agent was found: "
                                + agent);
                        allow = false;
                        break;
                    }
                }
            }
            if (!allow) {
                response.sendError(SC_UNAUTHORIZED, "ERROR, user agent is not authorized");
                LOG.error(getBeanName() + ": handleRequestInternal - ERROR, user agent is not authorized");
                return null;
            }
        } else {
            response.sendError(SC_UNAUTHORIZED, "ERROR, user agent info was not received and is required!");
            LOG.error(getBeanName() + ": handleRequestInternal - ERROR, user agent header "
                    + "is required but was not provided by the client");
            return null;
        }
    }

    // we do not support chunked transfer encoding, which is an http 1.1 feature
    if (request.getHeader(TRANSFER_ENCODING_HDR) != null
            && request.getHeader(TRANSFER_ENCODING_HDR).equalsIgnoreCase(CHUNKED)) {
        response.sendError(SC_BAD_REQUEST, "Chunked transfer encoding is not supported");
        return null;
    }

    /*
     * isSecure returns a boolean indicating whether this request was made using a
     * secure channel, such as HTTPS. So, if the channel must be secure, but it is not,
     * then throw an exception and return an error.
     */
    if (isSecure() && !request.isSecure()) {
        response.sendError(SC_UNAUTHORIZED, "ERROR, channel is not secure");
        LOG.error(getBeanName() + ": handleRequestInternal - ERROR, channel is not secure");
        return null;
    }

    /*
     * getUserPrincipal() returns a java.security.Principal containing the name of the
     * user making this request, else it returns null if the user has not been
     * authenticated. So, if it is mandated that the user be authenticated, but the
     * user has not been authenticated, then throw an exception and return an error.
     */
    if (isAuthenticated() && request.getUserPrincipal() == null) {
        response.sendError(SC_UNAUTHORIZED, "ERROR, user is not authenticated");
        LOG.error(getBeanName() + ": handleRequestInternal - ERROR, user is not authenticated");
        return null;
    }

    /*
     * Check for a valid method - the only supported http methods are GET, POST, PUT,
     * and DELETE. Here are some good descriptions regarding the methods and their use
     * with respect to this servlet.
     *
     * The GET method is used for projecting data from the DB, so it maps to a select
     * statement.
     *
     * The PUT and POST methods are used for inserting or updating an entity in the DB,
     * so they map to either an update or insert.
     *
     * The DELETE is used for removing one or more entities from the DB, so it maps to
     * a delete.
     *
     * The bean must be assigned at least one of the methods to service.
     */
    Method method = null;
    try {
        method = Enum.valueOf(Method.class, request.getMethod().toUpperCase());
        LOG.debug(getBeanName() + ": handleRequestInternal - processing this method: " + method.toString());
    } catch (IllegalArgumentException e) {
        LOG.error(getBeanName() + ":handleRequestInternal - This method is not allowed ["
                + request.getMethod() + "]");
        response.setHeader("Allow", allowedMethodsRsp);
        response.sendError(SC_METHOD_NOT_ALLOWED, "This method is not allowed [" + request.getMethod() + "]");
        return null;
    }

    // do some more method validation; i.e., make sure the requested method has
    // been assigned a SQL statement
    //
    // TODO: we may be able to remove this block of code
    String s1 = null;
    if (method.isGet() && sqlStmnts4Get == null || method.isPost() && sqlStmnts4Post == null
            || method.isPut() && sqlStmnts4Put == null || method.isDelete() && sqlStmnts4Delete == null) {
        response.setHeader("Allow", allowedMethodsRsp);
        s1 = "HTTP method [" + method + "] is not supported";
        response.sendError(SC_METHOD_NOT_ALLOWED, s1);
        LOG.error(getBeanName() + ":handleRequestInternal - " + s1);
        return null;
    }

    // If the client has specified an 'Accept' header field, then determine
    // if it is willing or capable of accepting JSON or anything (*/*)
    //
    // TODO: what about the client accepting urlencoded strings??
    s1 = request.getHeader(ACCEPT_HDR);
    if (s1 != null && s1.length() > 0) {
        LOG.debug(getBeanName() + ":handleRequestInternal - client-specified media type in accept header = "
                + s1);
        // parse the accept header's content
        String[] mediaTypes = s1.trim().split(COMMA_STR);
        boolean match = false;
        for (String mediaType : mediaTypes) {
            mediaType = mediaType.trim().toLowerCase();
            if (mediaType.startsWith(anyContentType) || mediaType.startsWith(jsonContentType)) {
                match = true;
                break;
            }
        }
        if (!match) {
            LOG.error(getBeanName() + ":handleRequestInternal - client-specified media type of '" + s1
                    + "' does not include '" + jsonContentType + "'");
            response.sendError(SC_NOT_ACCEPTABLE, "client-specified media type of '" + s1
                    + "' does not include '" + jsonContentType + "'");
            return null;
        }
    }

    // pick up the corresponding list of SQL statements for this request
    List<SqlStmnt> sqlStmnts = null;
    switch (method) {
    case GET:
        sqlStmnts = getSqlStmnts4Get();
        break;
    case DELETE:
        sqlStmnts = getSqlStmnts4Delete();
        break;
    case PUT:
        sqlStmnts = getSqlStmnts4Put();
        break;
    case POST:
        sqlStmnts = getSqlStmnts4Post();
        break;
    default:
        response.sendError(SC_METHOD_NOT_ALLOWED, "ERROR, unsupported method type: " + method);
        LOG.error(getBeanName() + ": handleRequestInternal - ERROR, encountered unknown method type: "
                + method);
        return null;
    }

    // ~~~~~~ EXTRACT PARAMETERS, IF ANY ~~~~~~~~~~~

    // GETs with entity bodies are illegal
    if (method.isGet() && request.getContentLength() > 0) {
        response.sendError(SC_BAD_REQUEST,
                "Client has issued a malformed or illegal request; GET cannot include entity body");
        return null;
    }

    // the DELETE method also cannot include an entity body; however, the
    // servlet containers already ignore them, so no need to check for that

    // see if a json object arrived
    boolean jsonObjectPresent = (method.isPost() || method.isPut()) && (request.getContentLength() > 0
            && request.getContentType().equalsIgnoreCase(jsonContentType));
    LOG.debug(getBeanName() + ": jsonObjectPresent = " + jsonObjectPresent);

    // see if this is a PUT with an entity. we've learned that for PUTs,
    // getParameterMap does not work the same across all servlet containers,
    // so we need to take care of this ourselves
    boolean putWithBodyPresent = (method.isPut()) && (request.getContentLength() > 0
            && request.getContentType().equalsIgnoreCase(urlEncodedContentType));
    LOG.debug(getBeanName() + ": putWithBodyPresent = " + putWithBodyPresent);

    // collect incoming parameters and place them in a common bucket
    //
    // ~~~~ ALL PARAMETER KEY NAMES MUST BE FORCED TO LOWER CASE ~~~
    //
    List<Map<String, String>> cParams = new ArrayList<Map<String, String>>();

    // first, get the incoming query or form parameters (if any); we will
    // assume that each key has only one parameter. in other words,
    // we're not dealing with drop-down boxes or things similar
    if (!putWithBodyPresent && !jsonObjectPresent) {
        Map<String, String[]> qParams = request.getParameterMap();
        if (qParams != null && !qParams.isEmpty()) {
            Map<String, String> qMap = new HashMap<String, String>();
            for (String key : qParams.keySet()) {
                qMap.put(key.toLowerCase(), qParams.get(key)[0]);
            }
            if (!qMap.isEmpty()) {
                cParams.add(qMap);
                LOG.debug(getBeanName() + ": query params = " + qMap.toString());
            }
        }
    }
    // a put with an entity body arrived, so get the parameters from the
    // body and place them in the common bucket
    else if (putWithBodyPresent) {
        try {
            Map<String, String> putParams = null;
            // parseUrlEncoded will force keys to lower case
            putParams = Utils.parseUrlEncoded(request.getInputStream());
            if (putParams != null && !putParams.isEmpty()) {
                cParams.add(putParams);
            }
        } catch (Exception exc) {
            LOG.error(getBeanName() + ": ERROR, caught this exception while parsing urlencoded string: "
                    + exc.toString());
            LOG.error(getBeanName() + ": exception stack trace follows:");
            dumpStackTrace(exc.getStackTrace());
            if (exc.getCause() != null) {
                LOG.error(getBeanName() + ": Caused by " + exc.getCause().toString());
                LOG.error(getBeanName() + ": causing exception stack trace follows:");
                dumpStackTrace(exc.getCause().getStackTrace());
            }
            response.sendError(SC_BAD_REQUEST, "urlencoded string parsing error: " + exc.getMessage());
            return null;
        }
    }
    // ok, a json object arrived, so get the parameters defined in that object
    // and place them in the common bucket
    else {
        // it's a json object, so parse it to extract params from it
        try {
            List<Map<String, String>> jParams = null;
            // parseJson will ensure that all passed-in JSON objects have
            // the same set of identical keys
            jParams = Utils.parseJson(request.getInputStream());
            if (jParams != null && !jParams.isEmpty()) {
                // if we also got query params then ensure they have the
                // same set of keys as the json params. why anyone would
                // ever do this is beyond me, but I'll leave it in for now
                if (!cParams.isEmpty()) {
                    Map<String, String> cMap = cParams.get(0);
                    Map<String, String> jMap = jParams.get(0);
                    for (String key : cMap.keySet()) {
                        if (jMap.get(key) == null) {
                            String eStr = getBeanName()
                                    + ": ERROR, json object key set does not match query param key set";
                            LOG.error(eStr);
                            response.sendError(SC_BAD_REQUEST, eStr);
                            return null;
                        }
                    }
                    // place the passed-in query params in the jParams bucket
                    jParams.add(cMap);
                }
                // assign the jParams bucket to the common bucket
                cParams = jParams;
            }
        } catch (Exception exc) {
            LOG.error(getBeanName() + ": ERROR, caught this exception while parsing json object: "
                    + exc.toString());
            LOG.error(getBeanName() + ": exception stack trace follows:");
            dumpStackTrace(exc.getStackTrace());
            if (exc.getCause() != null) {
                LOG.error(getBeanName() + ": Caused by " + exc.getCause().toString());
                LOG.error(getBeanName() + ": causing exception stack trace follows:");
                dumpStackTrace(exc.getCause().getStackTrace());
            }
            response.sendError(SC_BAD_REQUEST, "json parsing error: " + exc.getMessage());
            return null;
        }
    }

    // if trace is on, dump the params (if any) to the log
    if (LOG.isDebugEnabled()) {
        if (!cParams.isEmpty()) {
            for (int i = 0; i < cParams.size(); i++) {
                LOG.debug(getBeanName() + ": handleRequestInternal - received these params: "
                        + cParams.get(i).toString());
            }
        } else {
            LOG.debug(getBeanName() + ": handleRequestInternal - did not receive any params");
        }
    }

    // ensure none of the params' values have been black-listed
    if (!cParams.isEmpty() && getBlackList().length() > 0) {
        char[] bl = getBlackList().toCharArray();
        for (int i = 0; i < cParams.size(); i++) {
            for (String value : cParams.get(i).values()) {
                if (Utils.isOnBlackList(value, bl)) {
                    response.sendError(SC_BAD_REQUEST,
                            "encountered black listed character in this param value: " + value);
                    LOG.error(getBeanName() + ": handleRequestInternal - encountered black listed "
                            + "character in this param value: " + value);
                    return null;
                }
            }
        }
    }

    // find the proper SQL statement based on the incoming parameters' (if any) keys
    SqlStmnt sqlStmnt = null;
    try {
        // getMatch will try and find a match, even if no params were provided.
        // @formatter:off
        sqlStmnt = (cParams.isEmpty())
                ? SqlStmnt.getMatch(sqlStmnts, null)
                : SqlStmnt.getMatch(sqlStmnts, cParams.get(0).keySet());
        // @formatter:on
        if (sqlStmnt == null && !cParams.isEmpty()) {
            LOG.error(getBeanName() + ":ERROR, unable to find sql statement with this incoming param set: "
                    + cParams.toString());
            response.sendError(SC_INTERNAL_SERVER_ERROR, "internal server error: mapping error");
            return null;
        } else if (sqlStmnt == null) {
            LOG.warn(getBeanName() + ": warning, unable to find sql statement on first pass, "
                    + "will use extra path info");
        } else {
            LOG.debug(getBeanName() + ": handleRequestInternal - matching sql stmt = " + sqlStmnt.toString());
        }
    } catch (Exception exc) {
        LOG.error(getBeanName() + ":ERROR, caught this exception while mapping sql to params: "
                + exc.toString());
        LOG.error(getBeanName() + ": exception stack trace follows:");
        dumpStackTrace(exc.getStackTrace());
        if (exc.getCause() != null) {
            LOG.error(getBeanName() + ": Caused by " + exc.getCause().toString());
            LOG.error(getBeanName() + ": causing exception stack trace follows:");
            dumpStackTrace(exc.getCause().getStackTrace());
        }
        response.sendError(SC_INTERNAL_SERVER_ERROR, "mapping error");
        return null;
    }

    // if getMatch could not find a match - perhaps input params were not provided -
    // then use the URI's 'extended path' information as an input param
    if (sqlStmnt == null) {
        LOG.debug(getBeanName() + ": invoking getExtraPathInfo");
        String[] xtraPathInfo = Utils.getExtraPathInfo(request.getPathInfo());
        if (xtraPathInfo != null && xtraPathInfo.length >= 2) {
            LOG.debug(getBeanName() + ": extra path key:value = " + xtraPathInfo[0] + ":" + xtraPathInfo[1]);
        } else {
            LOG.error(getBeanName() + ":ERROR, getExtraPathInfo failed to find info");
            response.sendError(SC_INTERNAL_SERVER_ERROR, "internal server error: mapping error");
            return null;
        }
        // put the xtra path info in the common param bucket and try again
        cParams.clear();
        Map<String, String> xMap = new HashMap<String, String>();
        xMap.put(xtraPathInfo[0], xtraPathInfo[1]);
        cParams.add(xMap);
        // try again with the extra path info
        sqlStmnt = SqlStmnt.getMatch(sqlStmnts, xMap.keySet());
        if (sqlStmnt == null) {
            LOG.error(getBeanName() + ":ERROR, unable to find sql statement with this xtra path info: "
                    + cParams.toString());
            response.sendError(SC_NOT_FOUND, "internal server error: mapping error");
            return null;
        }
    }

    // if we've gotten this far, we've gotten past the security gauntlet and
    // we have a SQL statement to work with.
    SqlResult sqlResult = null;
    try {
        // get the output stream
        OutputStream os = response.getOutputStream();

        // FIRE IN THE DB HOLE :)
        if ((sqlResult = sqlStmnt.execute(cParams)) == null) {
            // execute will have logged the necessary debug/error info
            response.sendError(SC_INTERNAL_SERVER_ERROR);
            return null;
        }

        // execute went through ok, let's see how to respond
        switch (method) {
        case GET:
            // if a resultset was returned, then set the content type,
            // convert it to json, and write it out
            List<Map<String, Object>> listMap = sqlResult.getResultSet();
            if (listMap != null) {
                // tell the client the content type
                response.setContentType(rspJsonContentType);
                String jsonOutput = Utils.generateJson(sqlResult.getResultSet());
                LOG.trace(getBeanName() + ": returning this payload - " + jsonOutput);
                os.write(jsonOutput.getBytes());

                // ensure that only the client can cache the data and tell
                // the client how long the data can remain active
                response.setHeader(CACHE_CNTRL_HDR,
                        (getCacheControl() != null) ? getCacheControl() : DFLT_CACHE_CNTRL_STR);
                response.setHeader(PRAGMA_HDR, PRAGMA_NO_CACHE_STR);
                response.setDateHeader(EXPIRES_HDR, currentTime + (getExpires() * 1000));
            } else {
                LOG.debug(getBeanName() + ": NOT returning json message");
            }
            response.setStatus(SC_OK);
            break;
        case DELETE:
            // a DELETE should not send back an entity body
            response.setStatus(SC_NO_CONTENT);
            break;
        case PUT:
            /*
             * PUTs are idempotent; therefore, they must provide ALL the properties
             * that pertain to the resource/entity that they are creating or updating.
             * Updates cannot be partial updates; they must be full updates. A PUT is
             * issued by a client that knows the identifier (in our case, primary key)
             * of the resource/entity. Therefore, we do not have to send back a
             * Location header in response to a PUT that has created a resource.
             */
            if (sqlStmnt.isInsert()) {
                response.setStatus(SC_CREATED);
            } else {
                response.setStatus(SC_OK);
            }
            break;
        case POST:
            /*
             * A POST is not idempotent; therefore, it can be used to perform a
             * 'partial' update, as well as a full create. When creating a resource
             * via POST, the client does not know the primary key, and it assumes it
             * will be auto-generated; therefore, a Location header with the
             * auto-generated key must be returned to the client.
             */
            if (sqlStmnt.isInsert()) {
                response.setStatus(SC_CREATED);
                // we need to return the new key, but only if it was not a
                // batch insert. the new key should be returned via the
                // location header

                // check if a key holder exists; if not, then the table was not
                // configured with an auto-generated key.
                String locationPath = request.getRequestURL().toString();
                if (sqlResult.getKeyHolder() != null) {
                    // key holder exists, check and see if a key is present
                    if (sqlResult.getKeyHolder().getKey() != null) {
                        String id = sqlResult.getKeyHolder().getKey().toString();
                        LOG.debug(getBeanName() + ": getKey() returns " + id);
                        locationPath += ("/" + id);
                        LOG.debug(getBeanName() + ": locationPath = " + locationPath);
                        response.setHeader(LOCATION_HDR, locationPath);
                    }
                    // no single key, check for multiple keys
                    // TODO: should we send back all keys?
                    else if (sqlResult.getKeyHolder().getKeys() != null) {
                        Map<String, Object> keyMap = sqlResult.getKeyHolder().getKeys();
                        LOG.debug(getBeanName() + ": getKeys() returns " + keyMap);
                    }
                    // maybe a list of key maps?
                    // TODO: should we send back all keys?
                    else if (sqlResult.getKeyHolder().getKeyList() != null) {
                        for (Map<String, Object> map : sqlResult.getKeyHolder().getKeyList()) {
                            LOG.debug(getBeanName() + ": Map from getKeyList(): " + map);
                        }
                    }
                } else {
                    // no key holder; the table was not configured with an auto-generated key
                    LOG.debug(getBeanName() + ": key holder was not returned for the insert");
                }
            } else {
                // it was not an insert, so just send back an OK for the update
                response.setStatus(SC_OK);
            }
            break;
        default:
            response.setStatus(SC_OK);
            break;
        }
    } catch (JsonProcessingException exc) {
        LOG.error(getBeanName() + ":ERROR, caught this JsonProcessingException while trying to gen json "
                + "message: " + exc.toString());
        LOG.error(getBeanName() + ": exception stack trace follows:");
        dumpStackTrace(exc.getStackTrace());
        if (exc.getCause() != null) {
            LOG.error(getBeanName() + ": Caused by " + exc.getCause().toString());
            LOG.error(getBeanName() + ": causing exception stack trace follows:");
            dumpStackTrace(exc.getCause().getStackTrace());
        }
        response.sendError(SC_INTERNAL_SERVER_ERROR, "parsing error");
        return null;
    } catch (Exception exc) {
        LOG.error(getBeanName() + ":ERROR, caught this Exception while trying to gen json message: "
                + exc.toString());
        LOG.error(getBeanName() + ": exception stack trace follows:");
        dumpStackTrace(exc.getStackTrace());
        if (exc.getCause() != null) {
            LOG.error(getBeanName() + ": Caused by " + exc.getCause().toString());
            LOG.error(getBeanName() + ": causing exception stack trace follows:");
            dumpStackTrace(exc.getCause().getStackTrace());
        }
        response.sendError(SC_INTERNAL_SERVER_ERROR, "parsing error");
        return null;
    } finally {
        if (sqlResult != null) {
            SqlResult.enqueue(sqlResult);
        }
    }

    // must return null, because we're not using views!
    return null;
}
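The trace block at the top of the example above walks every header name and logs its value. A hedged variation of the same pattern, sketched below, collects the headers into a case-insensitive map instead of logging them; the class and method names (HeaderUtil, headersToMap) are hypothetical, and a TreeMap with String.CASE_INSENSITIVE_ORDER is used because HTTP header names are compared without regard to case.

import java.util.Collections;
import java.util.Enumeration;
import java.util.Map;
import java.util.TreeMap;

import javax.servlet.http.HttpServletRequest;

// Hypothetical helper: snapshot single-valued request headers into a case-insensitive map.
public final class HeaderUtil {

    private HeaderUtil() {
    }

    public static Map<String, String> headersToMap(HttpServletRequest request) {
        // Header names are case-insensitive, so use a case-insensitive key comparator.
        Map<String, String> headers = new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER);
        // getHeaderNames() may return null if the container disallows header access.
        Enumeration<String> names = request.getHeaderNames();
        if (names != null) {
            while (names.hasMoreElements()) {
                String name = names.nextElement();
                // getHeader returns only the first value of a multi-valued header.
                headers.put(name, request.getHeader(name));
            }
        }
        return Collections.unmodifiableMap(headers);
    }
}

With such a map, a lookup like headersToMap(request).get("User-Agent") works regardless of how the client capitalized the header name.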
From source file:net.ymate.framework.webmvc.AbstractWebErrorProcessor.java
private String __doParseExceptionDetail(Throwable e) {
    IRequestContext _requestCtx = WebContext.getRequestContext();
    HttpServletRequest _request = WebContext.getRequest();
    WebContext _context = WebContext.getContext();
    //
    StringBuilder _errSB = new StringBuilder("An exception occurred at ")
            .append(DateTimeUtils.formatTime(System.currentTimeMillis(), DateTimeUtils.YYYY_MM_DD_HH_MM_SS_SSS))
            .append(":\n");
    _errSB.append("-------------------------------------------------\n");
    _errSB.append("-- ThreadId: ").append(Thread.currentThread().getId()).append("\n");
    _errSB.append("-- RequestMapping: ").append(_requestCtx.getRequestMapping()).append("\n");
    _errSB.append("-- ResponseStatus: ").append(((GenericResponseWrapper) WebContext.getResponse()).getStatus())
            .append("\n");
    _errSB.append("-- Method: ").append(_requestCtx.getHttpMethod().name()).append("\n");
    _errSB.append("-- RemoteAddrs: ").append(JSON.toJSONString(WebUtils.getRemoteAddrs(_request))).append("\n");
    RequestMeta _meta = _context.getAttribute(RequestMeta.class.getName());
    if (_meta != null) {
        _errSB.append("-- Controller: ").append(_meta.getTargetClass().getName()).append(":")
                .append(_meta.getMethod().getName()).append("\n");
    }
    _errSB.append("-- ContextAttributes:").append("\n");
    for (Map.Entry<String, Object> _entry : _context.getAttributes().entrySet()) {
        if (!StringUtils.startsWith(_entry.getKey(), "net.ymate.platform.webmvc")) {
            _errSB.append("\t ").append(_entry.getKey()).append(": ")
                    .append(JSON.toJSONString(_entry.getValue())).append("\n");
        }
    }
    _errSB.append("-- Parameters:").append("\n");
    for (Map.Entry<String, Object> _entry : _context.getParameters().entrySet()) {
        _errSB.append("\t ").append(_entry.getKey()).append(": ").append(JSON.toJSONString(_entry.getValue()))
                .append("\n");
    }
    _errSB.append("-- Attributes:").append("\n");
    Enumeration _enum = _request.getAttributeNames();
    while (_enum.hasMoreElements()) {
        String _attrName = (String) _enum.nextElement();
        _errSB.append("\t ").append(_attrName).append(": ")
                .append(JSON.toJSONString(_request.getAttribute(_attrName))).append("\n");
    }
    _errSB.append("-- Headers:").append("\n");
    _enum = _request.getHeaderNames();
    while (_enum.hasMoreElements()) {
        String _headName = (String) _enum.nextElement();
        if ("cookie".equalsIgnoreCase(_headName)) {
            continue;
        }
        _errSB.append("\t ").append(_headName).append(": ")
                .append(JSON.toJSONString(_request.getHeader(_headName))).append("\n");
    }
    _errSB.append("-- Cookies:").append("\n");
    Cookie[] _cookies = _request.getCookies();
    if (_cookies != null) {
        for (Cookie _cookie : _cookies) {
            _errSB.append("\t ").append(_cookie.getName()).append(": ")
                    .append(JSON.toJSONString(_cookie.getValue())).append("\n");
        }
    }
    _errSB.append("-- Session:").append("\n");
    for (Map.Entry<String, Object> _entry : _context.getSession().entrySet()) {
        _errSB.append("\t ").append(_entry.getKey()).append(": ").append(JSON.toJSONString(_entry.getValue()))
                .append("\n");
    }
    _errSB.append(__doExceptionToString(e));
    _errSB.append("-------------------------------------------------\n");
    //
    return _errSB.toString();
}
From source file:org.betaconceptframework.astroboa.console.security.ResourceApiProxy.java
@GET
@PUT
@POST
@DELETE
@Path("{resourceApiPath:.*}")
@Produces("*/*")
public Response dispatchToAstroboaRepository(@PathParam("resourceApiPath") String resourceApiPath,
        @Context UriInfo uriInfo, @Context HttpServletRequest httpServletRequest) {
    try {
        String repositoryId = AstroboaClientContextHolder.getActiveRepositoryId();
        if (StringUtils.isBlank(repositoryId)) {
            throw new WebApplicationException(HttpURLConnection.HTTP_NOT_FOUND);
        }

        CmsRepository activeCmsRepository = AstroboaClientContextHolder.getActiveCmsRepository();
        if (activeCmsRepository == null) {
            throw new WebApplicationException(HttpURLConnection.HTTP_NOT_FOUND);
        }

        SecurityContext securityContext = AstroboaClientContextHolder.getActiveSecurityContext();
        if (securityContext == null) {
            throw new WebApplicationException(HttpURLConnection.HTTP_NOT_FOUND);
        }

        String username = securityContext.getIdentity();
        if (StringUtils.isBlank(username)) {
            throw new WebApplicationException(HttpURLConnection.HTTP_NOT_FOUND);
        }

        String resourceApiBasePath = activeCmsRepository.getRestfulApiBasePath();
        if (StringUtils.isBlank(resourceApiBasePath)) {
            throw new WebApplicationException(HttpURLConnection.HTTP_NOT_FOUND);
        }

        HttpSession httpSession = httpServletRequest.getSession();
        String password = (String) httpSession.getAttribute("repositoryPassword");
        if (StringUtils.isBlank(password)) {
            throw new WebApplicationException(HttpURLConnection.HTTP_NOT_FOUND);
        }

        String httpMethod = httpServletRequest.getMethod();
        String requestPath = uriInfo.getPath();
        MultivaluedMap<String, String> queryParameters = uriInfo.getQueryParameters();

        ClientRequest request = new ClientRequest(
                "http://localhost:8080" + resourceApiBasePath + "/" + repositoryId + requestPath);

        // read the payload if the http method is put or post
        if (httpMethod.equals("POST") || httpMethod.equals("PUT")) {
            request.body(httpServletRequest.getContentType(), getBody(httpServletRequest));
        }

        // add authorization header (BASIC AUTH)
        String basicAuthString = username + ":" + password;
        String authorization = "BASIC " + Base64.encodeBytes(basicAuthString.getBytes());
        request.header(HttpHeaders.AUTHORIZATION, authorization);

        // add headers
        Enumeration headerNames = httpServletRequest.getHeaderNames();
        while (headerNames.hasMoreElements()) {
            String headerName = (String) headerNames.nextElement();
            request.header(headerName, httpServletRequest.getHeader(headerName));
        }

        // add query parameters
        for (Map.Entry queryParamEntry : queryParameters.entrySet()) {
            String parameterValue = ((List<String>) queryParamEntry.getValue()).get(0);
            // ClientRequest (request) encodes the values before the execution of the request.
            // Therefore we need to decode the values before we feed them to the ClientRequest instance.
            request.queryParameter((String) queryParamEntry.getKey(),
                    URLDecoder.decode(parameterValue, "UTF-8"));
        }

        String uri = request.getUri();
        ClientResponse clientResponse = request.httpMethod(httpMethod, new GenericType<InputStream>() {
        });

        return clientResponse;
    } catch (WebApplicationException e) {
        throw e;
    } catch (Exception e) {
        logger.error("A problem occurred while sending request to Astroboa Repository", e);
        throw new WebApplicationException(HttpURLConnection.HTTP_INTERNAL_ERROR);
    }
}
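The proxy above forwards every incoming header verbatim to the outbound request. When writing a similar dispatcher, a common refinement is to skip headers conventionally treated as hop-by-hop, plus any header the proxy rebuilds itself (here, Authorization). The sketch below illustrates that filtering step only; it is not part of the Astroboa source, the class and method names (ProxyHeaders, copyRequestHeaders) are made up, and the header sink is left abstract so no particular HTTP client API is assumed.

import java.util.Arrays;
import java.util.Enumeration;
import java.util.Set;
import java.util.TreeSet;
import java.util.function.BiConsumer;

import javax.servlet.http.HttpServletRequest;

// Hypothetical helper: copy request headers to an outbound request, skipping
// hop-by-hop headers and any header the caller rebuilds itself (e.g. Authorization).
public final class ProxyHeaders {

    // Headers conventionally treated as hop-by-hop and not forwarded by proxies.
    private static final Set<String> HOP_BY_HOP = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
    static {
        HOP_BY_HOP.addAll(Arrays.asList("Connection", "Keep-Alive", "Proxy-Authenticate",
                "Proxy-Authorization", "TE", "Trailer", "Transfer-Encoding", "Upgrade"));
    }

    private ProxyHeaders() {
    }

    public static void copyRequestHeaders(HttpServletRequest request, Set<String> alsoSkip,
            BiConsumer<String, String> headerSink) {
        // getHeaderNames() may return null if the container disallows header access.
        Enumeration<String> names = request.getHeaderNames();
        if (names == null) {
            return;
        }
        while (names.hasMoreElements()) {
            String name = names.nextElement();
            // alsoSkip should be a case-insensitive set supplied by the caller.
            if (HOP_BY_HOP.contains(name) || alsoSkip.contains(name)) {
                continue;
            }
            // Forward every value of multi-valued headers, not just the first one.
            Enumeration<String> values = request.getHeaders(name);
            while (values.hasMoreElements()) {
                headerSink.accept(name, values.nextElement());
            }
        }
    }
}

In the Astroboa example, the Authorization header is rebuilt from the session-stored password, so it would be a natural entry for the alsoSkip set.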
From source file:org.dspace.authenticate.ShibAuthentication.java
/**
 * Authenticate the given or implicit credentials. This is the heart of the
 * authentication method: test the credentials for authenticity, and if accepted,
 * attempt to match (or optionally, create) an <code>EPerson</code>. If an
 * <code>EPerson</code> is found it is set in the <code>Context</code> that was passed.
 *
 * DSpace supports authentication using NetID, or email address. A user's NetID is a
 * unique identifier from the IdP that identifies a particular user. The NetID can be
 * of almost any form, such as a unique integer or string, or with Shibboleth 2.0 you
 * can use "targeted ids". You will need to coordinate with your Shibboleth federation
 * or identity provider. There are three ways to supply identity information to DSpace:
 *
 * 1) NetID from Shibboleth Header (best)
 *
 * The NetID-based method is superior because users may change their email address
 * with the identity provider. When this happens DSpace will not be able to associate
 * their new address with their old account.
 *
 * 2) Email address from Shibboleth Header (okay)
 *
 * In the case where a NetID header is not available or not found, DSpace will fall
 * back to identifying a user based upon their email address.
 *
 * 3) Tomcat's Remote User (worst)
 *
 * In the event that neither of the Shibboleth headers is found, then as a last resort
 * DSpace will look at Tomcat's remote user field. This is the least attractive option
 * because Tomcat has no way to supply additional attributes about a user. Because of
 * this, the autoregister option is not supported if this method is used.
 *
 * Identity Scheme Migration Strategies:
 *
 * If you are currently using Email based authentication (either 1 or 2) and want to
 * upgrade to NetID based authentication then there is an easy path. Simply enable
 * Shibboleth to pass the NetID attribute and set the netid-header below to the
 * correct value. When a user attempts to log in to DSpace, first DSpace will look for
 * an EPerson with the passed NetID; when this fails, DSpace will fall back to email
 * based authentication. Then DSpace will update the user's EPerson account record to
 * set their netid, so all future authentications for this user will be based upon
 * netid. One thing to note is that DSpace will prevent an account from switching
 * NetIDs. If an account already has a NetID set and then they try to authenticate
 * with a different NetID, the authentication will fail.
 *
 * @param context
 *            DSpace context, will be modified (ePerson set) upon success.
 * @param username
 *            Username (or email address) when method is explicit. Use null for
 *            implicit method.
 * @param password
 *            Password for explicit auth, or null for implicit method.
 * @param realm
 *            Not used by Shibboleth-based authentication.
 * @param request
 *            The HTTP request that started this operation, or null if not applicable.
 * @return One of: SUCCESS, BAD_CREDENTIALS, CERT_REQUIRED, NO_SUCH_USER, BAD_ARGS
 *         <p>
 *         Meaning: <br>
 *         SUCCESS - authenticated OK. <br>
 *         BAD_CREDENTIALS - user exists, but credentials (e.g. passwd) don't match <br>
 *         CERT_REQUIRED - not allowed to login this way without X.509 cert. <br>
 *         NO_SUCH_USER - user not found using this method. <br>
 *         BAD_ARGS - user/pw not appropriate for this method
 * @throws SQLException if database error
 */
@Override
public int authenticate(Context context, String username, String password, String realm,
        HttpServletRequest request) throws SQLException {

    // Check if sword compatibility is allowed, and if so see if we can
    // authenticate based upon a username and password. This is really helpful
    // if your repo uses Shibboleth but you want some accounts to be able to use
    // sword. This allows this compatibility without installing the password-based
    // authentication method, which has side effects such as allowing users to login
    // with a username and password from the webui.
    boolean swordCompatibility = configurationService
            .getBooleanProperty("authentication-shibboleth.sword.compatibility", true);

    if (swordCompatibility && username != null && username.length() > 0 && password != null
            && password.length() > 0) {
        return swordCompatibility(context, username, password, request);
    }

    if (request == null) {
        log.warn("Unable to authenticate using Shibboleth because the request object is null.");
        return BAD_ARGS;
    }

    // Initialize the additional EPerson metadata.
    initialize(context);

    // Log all headers received if debugging is turned on. This is enormously
    // helpful when debugging shibboleth related problems.
    if (log.isDebugEnabled()) {
        log.debug("Starting Shibboleth Authentication");
        String message = "Received the following headers:\n";
        @SuppressWarnings("unchecked")
        Enumeration<String> headerNames = request.getHeaderNames();
        while (headerNames.hasMoreElements()) {
            String headerName = headerNames.nextElement();
            @SuppressWarnings("unchecked")
            Enumeration<String> headerValues = request.getHeaders(headerName);
            while (headerValues.hasMoreElements()) {
                String headerValue = headerValues.nextElement();
                message += "" + headerName + "='" + headerValue + "'\n";
            }
        }
        log.debug(message);
    }

    // Should we auto register new users?
    boolean autoRegister = configurationService.getBooleanProperty("authentication-shibboleth.autoregister",
            true);

    // Four steps to authenticate a user
    try {
        // Step 1: Identify User
        EPerson eperson = findEPerson(context, request);

        // Step 2: Register New User, if necessary
        if (eperson == null && autoRegister)
            eperson = registerNewEPerson(context, request);

        if (eperson == null)
            return AuthenticationMethod.NO_SUCH_USER;

        // Step 3: Update User's Metadata
        updateEPerson(context, request, eperson);

        // Step 4: Log the user in.
        context.setCurrentUser(eperson);
        request.getSession().setAttribute("shib.authenticated", true);
        AuthenticateServiceFactory.getInstance().getAuthenticationService().initEPerson(context, request,
                eperson);

        log.info(eperson.getEmail() + " has been authenticated via shibboleth.");
        return AuthenticationMethod.SUCCESS;
    } catch (Throwable t) {
        // Log the error, and undo the authentication before returning a failure.
        log.error("Unable to successfully authenticate using shibboleth for user because of an exception.", t);
        context.setCurrentUser(null);
        return AuthenticationMethod.NO_SUCH_USER;
    }
}
From source file:com.liferay.portal.action.LoginAction.java
public static void login(HttpServletRequest req, HttpServletResponse res, String login, String password,
        boolean rememberMe) throws Exception {

    CookieKeys.validateSupportCookie(req);

    HttpSession ses = req.getSession();

    long userId = GetterUtil.getLong(login);

    int authResult = Authenticator.FAILURE;

    Company company = PortalUtil.getCompany(req);

    //

    boolean ldaplogin = false;

    if (PrefsPropsUtil.getString(company.getCompanyId(), PropsUtil.LDAP_AUTH_ENABLED).equals("true")) {
        LdapContext ctx = PortalLDAPUtil.getContext(company.getCompanyId());
        String accountname = "";
        try {
            User user1 = UserLocalServiceUtil.getUserByScreenName(company.getCompanyId(), login);
            Properties env = new Properties();
            String baseProviderURL = PrefsPropsUtil.getString(company.getCompanyId(),
                    PropsUtil.LDAP_BASE_PROVIDER_URL);
            String userDN = PrefsPropsUtil.getString(company.getCompanyId(), PropsUtil.LDAP_USERS_DN);
            String baseDN = PrefsPropsUtil.getString(company.getCompanyId(), PropsUtil.LDAP_BASE_DN);
            String filter = PrefsPropsUtil.getString(company.getCompanyId(), PropsUtil.LDAP_AUTH_SEARCH_FILTER);
            filter = StringUtil.replace(filter,
                    new String[] { "@company_id@", "@email_address@", "@screen_name@", "@user_id@" },
                    new String[] { String.valueOf(company.getCompanyId()), "", login, login });
            try {
                SearchControls cons = new SearchControls(SearchControls.SUBTREE_SCOPE, 1, 0, null, false,
                        false);
                NamingEnumeration enu = ctx.search(userDN, filter, cons);
                if (enu.hasMoreElements()) {
                    SearchResult result = (SearchResult) enu.nextElement();
                    accountname = result.getName();
                }
            } catch (Exception e1) {
                e1.printStackTrace();
            }
            env.put(Context.INITIAL_CONTEXT_FACTORY, PrefsPropsUtil.getString(PropsUtil.LDAP_FACTORY_INITIAL));
            env.put(Context.PROVIDER_URL, LDAPUtil.getFullProviderURL(baseProviderURL, baseDN));
            env.put(Context.SECURITY_PRINCIPAL, accountname + "," + userDN);
            env.put(Context.SECURITY_CREDENTIALS, password);
            new InitialLdapContext(env, null);
            ldaplogin = true;
            System.out.println("LDAP Login");
        } catch (Exception e) {
            SessionErrors.add(req, "ldapAuthentication");
            e.printStackTrace();
            System.out.println("LDAP error login");
            return;
        }
    }

    //

    Map headerMap = new HashMap();

    Enumeration enu1 = req.getHeaderNames();

    while (enu1.hasMoreElements()) {
        String name = (String) enu1.nextElement();

        Enumeration enu2 = req.getHeaders(name);

        List headers = new ArrayList();

        while (enu2.hasMoreElements()) {
            String value = (String) enu2.nextElement();

            headers.add(value);
        }

        headerMap.put(name, (String[]) headers.toArray(new String[0]));
    }

    Map parameterMap = req.getParameterMap();

    if (company.getAuthType().equals(CompanyImpl.AUTH_TYPE_EA)) {
        authResult = UserLocalServiceUtil.authenticateByEmailAddress(company.getCompanyId(), login, password,
                headerMap, parameterMap);

        userId = UserLocalServiceUtil.getUserIdByEmailAddress(company.getCompanyId(), login);
    } else if (company.getAuthType().equals(CompanyImpl.AUTH_TYPE_SN)) {
        authResult = UserLocalServiceUtil.authenticateByScreenName(company.getCompanyId(), login, password,
                headerMap, parameterMap);

        userId = UserLocalServiceUtil.getUserIdByScreenName(company.getCompanyId(), login);
    } else if (company.getAuthType().equals(CompanyImpl.AUTH_TYPE_ID)) {
        authResult = UserLocalServiceUtil.authenticateByUserId(company.getCompanyId(), userId, password,
                headerMap, parameterMap);
    }

    boolean OTPAuth = false;

    if (GetterUtil.getBoolean(PropsUtil.get("use.yubicoauthentication"), false) == true) {
        String otppasswd = ParamUtil.getString(req, "otp");
        String userslist = GetterUtil.getString(PropsUtil.get("yubico.users.not.require.otp"), "root");
        if (userslist.contains(login)) {
            authResult = Authenticator.SUCCESS;
        } else {
            OTPAuth = SecurityUtils.verifyOTP(otppasswd, login);
            if (authResult == Authenticator.SUCCESS && OTPAuth) {
                authResult = Authenticator.SUCCESS;
            } else {
                authResult = Authenticator.FAILURE;
            }
        }
    }

    if (PrefsPropsUtil.getString(company.getCompanyId(), PropsUtil.LDAP_AUTH_ENABLED).equals("true")) {
        if (!login.equals("root")) {
            if (ldaplogin) {
                authResult = Authenticator.SUCCESS;
            }
        }
    }

    if (authResult == Authenticator.SUCCESS) {
        boolean loginViaPortal = true;

        setLoginCookies(req, res, ses, userId, rememberMe);

        // login to epsos
        String language = GeneralUtils.getLocale(req);
        SpiritEhrWsClientInterface webService = EpsosHelperService.getInstance().getWebService(req);
        InitUserObj initUserObj = EpsosHelperImpl.createEpsosUserInformation(req, res, language, webService,
                userId, company.getCompanyId(), login, loginViaPortal);
        SpiritUserClientDto usr = initUserObj.getUsr();
        Assertion assertion = initUserObj.getAssertion();

        if (Validator.isNotNull(usr)) {
            req.getSession().setAttribute(EpsosHelperService.EPSOS_LOGIN_INFORMATION_ASSERTIONID,
                    assertion.getID());
            req.getSession().setAttribute(EpsosHelperService.EPSOS_LOGIN_INFORMATION_ASSERTION, assertion);
            req.getSession().setAttribute(EPSOS_LOGIN_INFORMATION_ATTRIBUTE, usr);
        } else {
            SessionErrors.add(req, "User doesn't belong to epSOS role so you can't login");
        }

        if (Validator.isNull(usr) && (!(login.equals("root")))) {
            try {
                Cookie cookie = new Cookie(CookieKeys.ID, StringPool.BLANK);
                cookie.setMaxAge(0);
                cookie.setPath("/");
                CookieKeys.addCookie(res, cookie);

                cookie = new Cookie(CookieKeys.PASSWORD, StringPool.BLANK);
                cookie.setMaxAge(0);
                cookie.setPath("/");
                CookieKeys.addCookie(res, cookie);

                try {
                    ses.invalidate();
                } catch (Exception e) {
                }
            } catch (Exception e) {
                req.setAttribute(PageContext.EXCEPTION, e);
            }

            throw new AuthException();
        }
    } else {
        throw new AuthException();
    }
}
From source file:io.hops.hopsworks.api.admin.HDFSUIProxyServlet.java
@Override
protected void service(HttpServletRequest servletRequest, HttpServletResponse servletResponse)
        throws ServletException, IOException {

    if (servletRequest.getUserPrincipal() == null) {
        servletResponse.sendError(403, "User is not logged in");
        return;
    }
    if (!servletRequest.isUserInRole("HOPS_ADMIN")) {
        servletResponse.sendError(Response.Status.BAD_REQUEST.getStatusCode(),
                "You don't have the access right for this service");
        return;
    }
    if (servletRequest.getAttribute(ATTR_TARGET_URI) == null) {
        servletRequest.setAttribute(ATTR_TARGET_URI, targetUri);
    }
    if (servletRequest.getAttribute(ATTR_TARGET_HOST) == null) {
        servletRequest.setAttribute(ATTR_TARGET_HOST, targetHost);
    }

    // Make the Request
    // note: we won't transfer the protocol version because I'm not
    // sure it would truly be compatible
    String proxyRequestUri = rewriteUrlFromRequest(servletRequest);

    try {
        String[] targetHost_port = settings.getHDFSWebUIAddress().split(":");
        File keyStore = new File(baseHadoopClientsService.getSuperKeystorePath());
        File trustStore = new File(baseHadoopClientsService.getSuperTrustStorePath());
        // Assume that KeyStore password and Key password are the same
        Protocol httpsProto = new Protocol("https",
                new CustomSSLProtocolSocketFactory(keyStore, baseHadoopClientsService.getSuperKeystorePassword(),
                        baseHadoopClientsService.getSuperKeystorePassword(), trustStore,
                        baseHadoopClientsService.getSuperTrustStorePassword()),
                Integer.parseInt(targetHost_port[1]));
        Protocol.registerProtocol("https", httpsProto);

        // Execute the request
        HttpClientParams params = new HttpClientParams();
        params.setCookiePolicy(CookiePolicy.BROWSER_COMPATIBILITY);
        params.setBooleanParameter(HttpClientParams.ALLOW_CIRCULAR_REDIRECTS, true);
        HttpClient client = new HttpClient(params);
        HostConfiguration config = new HostConfiguration();
        InetAddress localAddress = InetAddress.getLocalHost();
        config.setLocalAddress(localAddress);

        HttpMethod m = new GetMethod(proxyRequestUri);
        Enumeration<String> names = servletRequest.getHeaderNames();
        while (names.hasMoreElements()) {
            String headerName = names.nextElement();
            String value = servletRequest.getHeader(headerName);
            if (PASS_THROUGH_HEADERS.contains(headerName)) {
                // hdfs does not send back the js if encoding is not accepted,
                // but we don't want to accept encoding for the html because we
                // need to be able to parse it
                if (headerName.equalsIgnoreCase("accept-encoding") && (servletRequest.getPathInfo() == null
                        || !servletRequest.getPathInfo().contains(".js"))) {
                    continue;
                } else {
                    m.setRequestHeader(headerName, value);
                }
            }
        }

        String user = servletRequest.getRemoteUser();
        if (user != null && !user.isEmpty()) {
            m.setRequestHeader("Cookie", "proxy-user" + "=" + URLEncoder.encode(user, "ASCII"));
        }

        client.executeMethod(config, m);

        // Process the response
        int statusCode = m.getStatusCode();

        // Pass the response code. This method with the "reason phrase" is
        // deprecated but it's the only way to pass the reason along too.
        //noinspection deprecation
        servletResponse.setStatus(statusCode, m.getStatusLine().getReasonPhrase());

        copyResponseHeaders(m, servletRequest, servletResponse);

        // Send the content to the client
        copyResponseEntity(m, servletResponse);
    } catch (Exception e) {
        if (e instanceof RuntimeException) {
            throw (RuntimeException) e;
        }
        if (e instanceof ServletException) {
            throw (ServletException) e;
        }
        //noinspection ConstantConditions
        if (e instanceof IOException) {
            throw (IOException) e;
        }
        throw new RuntimeException(e);
    }
}