Example usage for javax.servlet FilterConfig getServletContext


Introduction

On this page you can find example usage for javax.servlet FilterConfig getServletContext.

Prototype

public ServletContext getServletContext();


Document

Returns a reference to the ServletContext in which the caller is executing.
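
Before the project examples below, here is a minimal, self-contained sketch of the typical pattern: a filter obtains the ServletContext from its FilterConfig in init(), caches it, and uses it later in doFilter(). The class name ContextAwareFilter and the attribute key "app.config" are illustrative only and do not come from any of the projects listed on this page.

import java.io.IOException;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;

// Illustrative example only: caches the ServletContext obtained from the
// FilterConfig during init() and reads an application-scoped attribute
// from it on every request.
public class ContextAwareFilter implements Filter {

    private ServletContext context;

    public void init(FilterConfig filterConfig) throws ServletException {
        // The ServletContext is shared by every servlet and filter in the
        // web application, so it is the usual place to look up
        // application-wide configuration.
        context = filterConfig.getServletContext();
        context.log("ContextAwareFilter initialized for " + context.getContextPath());
    }

    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        // Read a hypothetical application-scoped attribute ("app.config")
        // that some other component is assumed to have set.
        Object appConfig = context.getAttribute("app.config");
        if (appConfig != null) {
            request.setAttribute("app.config", appConfig);
        }
        chain.doFilter(request, response);
    }

    public void destroy() {
        context = null;
    }
}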

Usage

From source file: org.gridsphere.servlets.GridSphereFilter.java

public void init(FilterConfig filterConfig) {
    context = filterConfig.getServletContext();
}

From source file: org.josso.wls10.agent.WLSSessionEnforcementServletFilter.java

public void init(FilterConfig filterConfig) throws ServletException {
    // Validate and update our current component state
    ServletContext ctx = filterConfig.getServletContext();

    if (_agent == null) {

        try {

            Lookup lookup = Lookup.getInstance();
            lookup.init("josso-agent-config.xml"); // For spring compatibility ...

            // We need at least an abstract SSO Agent
            _agent = (WLSSSOAgent) lookup.lookupSSOAgent();
            _agent.start();

            // Enable agent debug output when debug logging is enabled
            if (log.isDebugEnabled())
                _agent.setDebug(1);

        } catch (Exception e) {
            throw new ServletException("Error starting SSO Agent : " + e.getMessage(), e);
        }

    }

}

From source file: org.apache.hadoop.hdfsproxy.LdapIpDirFilter.java

/** {@inheritDoc} */
public void init(FilterConfig filterConfig) throws ServletException {
    ServletContext context = filterConfig.getServletContext();

    contextPath = context.getContextPath();

    Configuration conf = new Configuration(false);
    conf.addResource("hdfsproxy-default.xml");
    conf.addResource("hdfsproxy-site.xml");
    // extract namenode from source conf.
    String nn = getNamenode(conf);

    InetSocketAddress nAddr = NetUtils.createSocketAddr(nn);
    context.setAttribute("name.node.address", nAddr);
    context.setAttribute("name.conf", conf);
    context.setAttribute(JspHelper.CURRENT_CONF, conf);

    // for storing hostname <--> cluster mapping to decide which source cluster
    // to forward
    context.setAttribute("org.apache.hadoop.hdfsproxy.conf", conf);

    if (lctx == null) {
        Hashtable<String, String> env = new Hashtable<String, String>();
        env.put(InitialLdapContext.INITIAL_CONTEXT_FACTORY,
                conf.get("hdfsproxy.ldap.initial.context.factory", "com.sun.jndi.ldap.LdapCtxFactory"));
        env.put(InitialLdapContext.PROVIDER_URL, conf.get("hdfsproxy.ldap.provider.url"));

        try {
            lctx = new InitialLdapContext(env, null);
        } catch (NamingException ne) {
            throw new ServletException("NamingException in initializing ldap" + ne.toString());
        }

        baseName = conf.get("hdfsproxy.ldap.role.base");
        hdfsIpSchemaStr = conf.get("hdfsproxy.ldap.ip.schema.string", "uniqueMember");
        hdfsIpSchemaStrPrefix = conf.get("hdfsproxy.ldap.ip.schema.string.prefix", "cn=");
        hdfsUidSchemaStr = conf.get("hdfsproxy.ldap.uid.schema.string", "uid");
        hdfsPathSchemaStr = conf.get("hdfsproxy.ldap.hdfs.path.schema.string", "documentLocation");
    }
    LOG.info(contextPath + ":: LdapIpDirFilter initialization successful");
}

From source file: org.ambraproject.cas.filter.GetGuidReturnEmailFilter.java

/**
 * Set the Database Service (which will be used to query the user's Email Address) by way of
 *   the Web Application Context, which is managed by the supporting framework.
 *
 * @param filterConfig Standard Filter configuration values, most notably the Servlet Context
 * @throws ServletException Thrown if there is a problem getting or setting the Database Service
 */
public void init(final FilterConfig filterConfig) throws ServletException {
    try {
        databaseService = (DatabaseService) WebApplicationContextUtils
                .getWebApplicationContext(filterConfig.getServletContext()).getBean("databaseService");
    } catch (final Exception e) {
        log.error("Failed to initialize GetGuidReturnEmailFilter.", e);
        throw new ServletException(e);
    }
}

From source file: org.unitime.timetable.filter.PageAccessFilter.java

public void init(FilterConfig cfg) throws ServletException {
    iContext = cfg.getServletContext();
    try {
        Document config = (new SAXReader())
                .read(cfg.getServletContext().getResource(cfg.getInitParameter("config")));
        for (Iterator i = config.getRootElement().element("action-mappings").elementIterator("action"); i.hasNext();) {
            Element action = (Element) i.next();
            String path = action.attributeValue("path");
            String input = action.attributeValue("input");
            if (path != null && input != null) {
                iPath2Tile.put(path + ".do", input);
            }
        }
    } catch (Exception e) {
        sLog.error("Unable to read config " + cfg.getInitParameter("config") + ", reason: " + e.getMessage());
    }
    if (cfg.getInitParameter("debug-time") != null) {
        debugTime = Long.parseLong(cfg.getInitParameter("debug-time"));
    }
    if (cfg.getInitParameter("dump-time") != null) {
        dumpTime = Long.parseLong(cfg.getInitParameter("dump-time"));
    }
    if (cfg.getInitParameter("session-attributes") != null) {
        dumpSessionAttribues = Boolean.parseBoolean(cfg.getInitParameter("session-attributes"));
    }
}

From source file: org.apache.hadoop.gateway.dispatch.DefaultHttpClientFactory.java

private int getMaxConnections(FilterConfig filterConfig) {
    int maxConnections = 32;
    GatewayConfig config = (GatewayConfig) filterConfig.getServletContext()
            .getAttribute(GatewayConfig.GATEWAY_CONFIG_ATTRIBUTE);
    if (config != null) {
        maxConnections = config.getHttpClientMaxConnections();
    }
    String str = filterConfig.getInitParameter("httpclient.maxConnections");
    if (str != null) {
        try {
            maxConnections = Integer.parseInt(str);
        } catch (NumberFormatException e) {
            // Ignore it and use the default.
        }
    }
    return maxConnections;
}

From source file: org.sakaiproject.tool.section.filter.RoleFilter.java

public void init(FilterConfig filterConfig) throws ServletException {
    if (logger.isInfoEnabled())
        logger.info("Initializing sections role filter");

    ac = (ApplicationContext) filterConfig.getServletContext()
            .getAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE);

    authnBeanName = filterConfig.getInitParameter("authnServiceBean");
    authzBeanName = filterConfig.getInitParameter("authzServiceBean");
    contextBeanName = filterConfig.getInitParameter("contextManagementServiceBean");
    authorizationFilterConfigurationBeanName = filterConfig
            .getInitParameter("authorizationFilterConfigurationBean");
    selectSiteRedirect = filterConfig.getInitParameter("selectSiteRedirect");
}

From source file: info.magnolia.cms.filters.CompositeFilter.java

/**
 * The first time called by the main filter.
 */
public void initFilters(FilterConfig filterConfig) {
    for (MgnlFilter filter : filters) {
        try {
            log.info("Initializing filter [{}]", filter.getName());
            filter.init(new CustomFilterConfig(filter.getName(), filterConfig.getServletContext(),
                    ServletUtils.initParametersToMap(filterConfig)));
        } catch (Exception e) {
            log.error("Error initializing filter [" + filter.getName() + "]", e);
        }
    }
}

From source file: org.apache.hadoop.gateway.hdfs.dispatch.WebHdfsHaDispatchTest.java

@Test
public void testConnectivityFailover() throws Exception {
    String serviceName = "WEBHDFS";
    HaDescriptor descriptor = HaDescriptorFactory.createDescriptor();
    descriptor.addServiceConfig(
            HaDescriptorFactory.createServiceConfig(serviceName, "true", "1", "1000", "2", "1000", null, null));
    HaProvider provider = new DefaultHaProvider(descriptor);
    URI uri1 = new URI("http://unreachable-host");
    URI uri2 = new URI("http://reachable-host");
    ArrayList<String> urlList = new ArrayList<String>();
    urlList.add(uri1.toString());
    urlList.add(uri2.toString());
    provider.addHaService(serviceName, urlList);
    FilterConfig filterConfig = EasyMock.createNiceMock(FilterConfig.class);
    ServletContext servletContext = EasyMock.createNiceMock(ServletContext.class);

    EasyMock.expect(filterConfig.getServletContext()).andReturn(servletContext).anyTimes();
    EasyMock.expect(servletContext.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME))
            .andReturn(provider).anyTimes();

    BasicHttpParams params = new BasicHttpParams();

    HttpUriRequest outboundRequest = EasyMock.createNiceMock(HttpRequestBase.class);
    EasyMock.expect(outboundRequest.getMethod()).andReturn("GET").anyTimes();
    EasyMock.expect(outboundRequest.getURI()).andReturn(uri1).anyTimes();
    EasyMock.expect(outboundRequest.getParams()).andReturn(params).anyTimes();

    HttpServletRequest inboundRequest = EasyMock.createNiceMock(HttpServletRequest.class);
    EasyMock.expect(inboundRequest.getRequestURL()).andReturn(new StringBuffer(uri2.toString())).once();
    EasyMock.expect(inboundRequest.getAttribute("dispatch.ha.failover.counter")).andReturn(new AtomicInteger(0))
            .once();
    EasyMock.expect(inboundRequest.getAttribute("dispatch.ha.failover.counter")).andReturn(new AtomicInteger(1))
            .once();

    HttpServletResponse outboundResponse = EasyMock.createNiceMock(HttpServletResponse.class);
    EasyMock.expect(outboundResponse.getOutputStream())
            .andAnswer(new IAnswer<SynchronousServletOutputStreamAdapter>() {
                @Override
                public SynchronousServletOutputStreamAdapter answer() throws Throwable {
                    return new SynchronousServletOutputStreamAdapter() {
                        @Override
                        public void write(int b) throws IOException {
                            throw new IOException("unreachable-host");
                        }
                    };
                }
            }).once();
    EasyMock.replay(filterConfig, servletContext, outboundRequest, inboundRequest, outboundResponse);
    Assert.assertEquals(uri1.toString(), provider.getActiveURL(serviceName));
    WebHdfsHaDispatch dispatch = new WebHdfsHaDispatch();
    dispatch.setHttpClient(new DefaultHttpClient());
    dispatch.setHaProvider(provider);
    dispatch.init();
    long startTime = System.currentTimeMillis();
    try {
        dispatch.executeRequest(outboundRequest, inboundRequest, outboundResponse);
    } catch (IOException e) {
        //this is expected after the failover limit is reached
    }
    long elapsedTime = System.currentTimeMillis() - startTime;
    Assert.assertEquals(uri2.toString(), provider.getActiveURL(serviceName));
    //test to make sure the sleep took place
    Assert.assertTrue(elapsedTime > 1000);
}

From source file: org.apache.hadoop.gateway.ha.dispatch.DefaultHaDispatchTest.java

@Test
public void testConnectivityFailover() throws Exception {
    String serviceName = "OOZIE";
    HaDescriptor descriptor = HaDescriptorFactory.createDescriptor();
    descriptor.addServiceConfig(
            HaDescriptorFactory.createServiceConfig(serviceName, "true", "1", "1000", "2", "1000", null, null));
    HaProvider provider = new DefaultHaProvider(descriptor);
    URI uri1 = new URI("http://unreachable-host");
    URI uri2 = new URI("http://reachable-host");
    ArrayList<String> urlList = new ArrayList<String>();
    urlList.add(uri1.toString());
    urlList.add(uri2.toString());
    provider.addHaService(serviceName, urlList);
    FilterConfig filterConfig = EasyMock.createNiceMock(FilterConfig.class);
    ServletContext servletContext = EasyMock.createNiceMock(ServletContext.class);

    EasyMock.expect(filterConfig.getServletContext()).andReturn(servletContext).anyTimes();
    EasyMock.expect(servletContext.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME))
            .andReturn(provider).anyTimes();

    BasicHttpParams params = new BasicHttpParams();

    HttpUriRequest outboundRequest = EasyMock.createNiceMock(HttpRequestBase.class);
    EasyMock.expect(outboundRequest.getMethod()).andReturn("GET").anyTimes();
    EasyMock.expect(outboundRequest.getURI()).andReturn(uri1).anyTimes();
    EasyMock.expect(outboundRequest.getParams()).andReturn(params).anyTimes();

    HttpServletRequest inboundRequest = EasyMock.createNiceMock(HttpServletRequest.class);
    EasyMock.expect(inboundRequest.getRequestURL()).andReturn(new StringBuffer(uri2.toString())).once();
    EasyMock.expect(inboundRequest.getAttribute("dispatch.ha.failover.counter")).andReturn(new AtomicInteger(0))
            .once();
    EasyMock.expect(inboundRequest.getAttribute("dispatch.ha.failover.counter")).andReturn(new AtomicInteger(1))
            .once();

    HttpServletResponse outboundResponse = EasyMock.createNiceMock(HttpServletResponse.class);
    EasyMock.expect(outboundResponse.getOutputStream())
            .andAnswer(new IAnswer<SynchronousServletOutputStreamAdapter>() {
                @Override
                public SynchronousServletOutputStreamAdapter answer() throws Throwable {
                    return new SynchronousServletOutputStreamAdapter() {
                        @Override
                        public void write(int b) throws IOException {
                            throw new IOException("unreachable-host");
                        }
                    };
                }
            }).once();
    EasyMock.replay(filterConfig, servletContext, outboundRequest, inboundRequest, outboundResponse);
    Assert.assertEquals(uri1.toString(), provider.getActiveURL(serviceName));
    DefaultHaDispatch dispatch = new DefaultHaDispatch();
    dispatch.setHttpClient(new DefaultHttpClient());
    dispatch.setHaProvider(provider);
    dispatch.setServiceRole(serviceName);
    dispatch.init();
    long startTime = System.currentTimeMillis();
    try {
        dispatch.executeRequest(outboundRequest, inboundRequest, outboundResponse);
    } catch (IOException e) {
        //this is expected after the failover limit is reached
    }
    long elapsedTime = System.currentTimeMillis() - startTime;
    Assert.assertEquals(uri2.toString(), provider.getActiveURL(serviceName));
    //test to make sure the sleep took place
    Assert.assertTrue(elapsedTime > 1000);
}