List of usage examples for java.lang.Thread setContextClassLoader
public void setContextClassLoader(ClassLoader cl)
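Every example on this page follows the same discipline: capture the thread's current context classloader, install a replacement, do the classloader-sensitive work, and restore the original in a finally block so the thread is never left pointing at the wrong loader. Below is a minimal sketch of that save/set/restore pattern; the ContextClassLoaderUtil class, runWithClassLoader helper, and pluginLoader parameter are hypothetical names for illustration, not part of any example that follows.

import java.util.concurrent.Callable;

public final class ContextClassLoaderUtil {

    // Runs 'work' with 'pluginLoader' installed as the calling thread's context
    // classloader, then restores whatever loader was there before.
    public static <T> T runWithClassLoader(ClassLoader pluginLoader, Callable<T> work) throws Exception {
        Thread current = Thread.currentThread();
        ClassLoader saved = current.getContextClassLoader();   // save
        current.setContextClassLoader(pluginLoader);           // set
        try {
            return work.call();
        } finally {
            current.setContextClassLoader(saved);              // restore
        }
    }
}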
From source file:net.lightbody.bmp.proxy.jetty.jetty.servlet.WebApplicationContext.java
/**
 * Start the Web Application.
 * @exception IOException
 */
protected void doStart() throws Exception {
    if (isStarted())
        return;

    // save context classloader
    Thread thread = Thread.currentThread();
    ClassLoader lastContextLoader = thread.getContextClassLoader();

    MultiException mex = null;
    try {
        // Find the webapp
        resolveWebApp();

        // Get the handler
        getServletHandler();
        _configurations = loadConfigurations();

        // initialize the classloader
        configureClassPath();
        initClassLoader(true);
        thread.setContextClassLoader(getClassLoader());
        initialize();

        // Do the default configuration
        configureDefaults();

        // Set classpath for Jasper.
        Map.Entry entry = _webAppHandler.getHolderEntry("test.jsp");
        if (entry != null) {
            ServletHolder jspHolder = (ServletHolder) entry.getValue();
            if (jspHolder != null && jspHolder.getInitParameter("classpath") == null) {
                String fileClassPath = getFileClassPath();
                jspHolder.setInitParameter("classpath", fileClassPath);
                if (log.isDebugEnabled())
                    log.debug("Set classpath=" + fileClassPath + " for " + jspHolder);
            }
        }

        // configure webapp
        configureWebApp();

        // If we have servlets, don't init them yet
        _webAppHandler.setAutoInitializeServlets(false);

        // Start handlers
        super.doStart();

        mex = new MultiException();
        // Context listeners
        if (_contextListeners != null && _webAppHandler != null) {
            ServletContextEvent event = new ServletContextEvent(getServletContext());
            for (int i = 0; i < LazyList.size(_contextListeners); i++) {
                try {
                    ((ServletContextListener) LazyList.get(_contextListeners, i)).contextInitialized(event);
                } catch (Exception ex) {
                    mex.add(ex);
                }
            }
        }

        // OK to Initialize servlets now
        if (_webAppHandler != null && _webAppHandler.isStarted()) {
            try {
                _webAppHandler.initializeServlets();
            } catch (Exception ex) {
                mex.add(ex);
            }
        }
    } catch (Exception e) {
        log.warn("Configuration error on " + _war, e);
        throw e;
    } finally {
        // restore the saved context classloader
        thread.setContextClassLoader(lastContextLoader);
    }

    if (mex != null)
        mex.ifExceptionThrow();
}
From source file:org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher.java
@Override
public PigStats launchPig(PhysicalPlan php, String grpName, PigContext pc)
        throws PlanException, VisitorException, IOException, ExecException, JobCreationException, Exception {
    long sleepTime = 500;
    aggregateWarning = Boolean.valueOf(pc.getProperties().getProperty("aggregate.warning"));
    MROperPlan mrp = compile(php, pc);

    ConfigurationValidator.validatePigProperties(pc.getProperties());
    Configuration conf = ConfigurationUtil.toConfiguration(pc.getProperties());

    MRExecutionEngine exe = (MRExecutionEngine) pc.getExecutionEngine();
    Properties defaultProperties = new Properties();
    JobConf defaultJobConf = exe.getLocalConf();
    Utils.recomputeProperties(defaultJobConf, defaultProperties);

    // This is a generic JobClient for checking progress of the jobs
    JobClient statsJobClient = new JobClient(exe.getJobConf());

    JobControlCompiler jcc = new JobControlCompiler(pc, conf,
            ConfigurationUtil.toConfiguration(defaultProperties));

    MRScriptState.get().addWorkflowAdjacenciesToConf(mrp, conf);

    // start collecting statistics
    PigStats.start(pc.getExecutionEngine().instantiatePigStats());
    MRPigStatsUtil.startCollection(pc, statsJobClient, jcc, mrp);

    // Find all the intermediate data stores. The plan will be destroyed during compile/execution
    // so this needs to be done before.
    MRIntermediateDataVisitor intermediateVisitor = new MRIntermediateDataVisitor(mrp);
    intermediateVisitor.visit();

    List<Job> failedJobs = new LinkedList<Job>();
    List<NativeMapReduceOper> failedNativeMR = new LinkedList<NativeMapReduceOper>();
    List<Job> completeFailedJobsInThisRun = new LinkedList<Job>();
    List<Job> succJobs = new LinkedList<Job>();

    int totalMRJobs = mrp.size();
    int numMRJobsCompl = 0;
    double lastProg = -1;
    long scriptSubmittedTimestamp = System.currentTimeMillis();

    // create the exception handler for the job control thread
    // and register the handler with the job control thread
    JobControlThreadExceptionHandler jctExceptionHandler = new JobControlThreadExceptionHandler();

    boolean stop_on_failure = Boolean.valueOf(pc.getProperties().getProperty("stop.on.failure", "false"));

    // jc is null only when mrp.size == 0
    while (mrp.size() != 0) {
        jc = jcc.compile(mrp, grpName);
        if (jc == null) {
            List<MapReduceOper> roots = new LinkedList<MapReduceOper>();
            roots.addAll(mrp.getRoots());

            // run the native mapreduce roots first then run the rest of the roots
            for (MapReduceOper mro : roots) {
                if (mro instanceof NativeMapReduceOper) {
                    NativeMapReduceOper natOp = (NativeMapReduceOper) mro;
                    try {
                        MRScriptState.get().emitJobsSubmittedNotification(1);
                        natOp.runJob();
                        numMRJobsCompl++;
                    } catch (IOException e) {
                        mrp.trimBelow(natOp);
                        failedNativeMR.add(natOp);

                        String msg = "Error running native mapreduce" + " operator job :" + natOp.getJobId()
                                + e.getMessage();

                        String stackTrace = Utils.getStackStraceStr(e);
                        LogUtils.writeLog(msg, stackTrace, pc.getProperties().getProperty("pig.logfile"), log);
                        log.info(msg);

                        if (stop_on_failure) {
                            int errCode = 6017;
                            throw new ExecException(msg, errCode, PigException.REMOTE_ENVIRONMENT);
                        }
                    }
                    double prog = ((double) numMRJobsCompl) / totalMRJobs;
                    notifyProgress(prog, lastProg);
                    lastProg = prog;
                    mrp.remove(natOp);
                }
            }
            continue;
        }

        // Initially, all jobs are in wait state.
        List<Job> jobsWithoutIds = jc.getWaitingJobs();
        log.info(jobsWithoutIds.size() + " map-reduce job(s) waiting for submission.");

        // notify listeners about jobs submitted
        MRScriptState.get().emitJobsSubmittedNotification(jobsWithoutIds.size());

        // update Pig stats' job DAG with just compiled jobs
        MRPigStatsUtil.updateJobMroMap(jcc.getJobMroMap());

        // determine job tracker url
        String jobTrackerLoc;
        JobConf jobConf = jobsWithoutIds.get(0).getJobConf();
        try {
            String port = jobConf.get(MRConfiguration.JOB_TRACKER_HTTP_ADDRESS);
            String jobTrackerAdd = jobConf.get(MRConfiguration.JOB_TRACKER);
            jobTrackerLoc = jobTrackerAdd.substring(0, jobTrackerAdd.indexOf(":"))
                    + port.substring(port.indexOf(":"));
        } catch (Exception e) {
            // Could not get the job tracker location, most probably we are running in local mode.
            // If it is the case, we don't print out job tracker location,
            // because it is meaningless for local mode.
            jobTrackerLoc = null;
            log.debug("Failed to get job tracker location.");
        }

        completeFailedJobsInThisRun.clear();

        // Set the thread UDFContext so registered classes are available.
        final UDFContext udfContext = UDFContext.getUDFContext();
        Thread jcThread = new Thread(jc, "JobControl") {
            @Override
            public void run() {
                UDFContext.setUdfContext(udfContext.clone()); // PIG-2576
                super.run();
            }
        };

        jcThread.setUncaughtExceptionHandler(jctExceptionHandler);
        jcThread.setContextClassLoader(PigContext.getClassLoader());

        // mark the times that the jobs were submitted so it's reflected in job history props
        for (Job job : jc.getWaitingJobs()) {
            JobConf jobConfCopy = job.getJobConf();
            jobConfCopy.set("pig.script.submitted.timestamp", Long.toString(scriptSubmittedTimestamp));
            jobConfCopy.set("pig.job.submitted.timestamp", Long.toString(System.currentTimeMillis()));
            job.setJobConf(jobConfCopy);
        }

        // All the setup done, now lets launch the jobs.
        jcThread.start();

        try {
            // a flag whether to warn failure during the loop below, so users can notice failure earlier.
            boolean warn_failure = true;

            // Now wait, till we are finished.
            while (!jc.allFinished()) {
                try {
                    jcThread.join(sleepTime);
                } catch (InterruptedException e) {
                }

                List<Job> jobsAssignedIdInThisRun = new ArrayList<Job>();

                for (Job job : jobsWithoutIds) {
                    if (job.getAssignedJobID() != null) {
                        jobsAssignedIdInThisRun.add(job);
                        log.info("HadoopJobId: " + job.getAssignedJobID());

                        // display the aliases being processed
                        MapReduceOper mro = jcc.getJobMroMap().get(job);
                        if (mro != null) {
                            String alias = MRScriptState.get().getAlias(mro);
                            log.info("Processing aliases " + alias);
                            String aliasLocation = MRScriptState.get().getAliasLocation(mro);
                            log.info("detailed locations: " + aliasLocation);
                        }

                        if (!HadoopShims.isHadoopYARN() && jobTrackerLoc != null) {
                            log.info("More information at: http://" + jobTrackerLoc
                                    + "/jobdetails.jsp?jobid=" + job.getAssignedJobID());
                        }

                        // update statistics for this job so jobId is set
                        MRPigStatsUtil.addJobStats(job);
                        MRScriptState.get().emitJobStartedNotification(job.getAssignedJobID().toString());
                    } else {
                        // This job is not assigned an id yet.
                    }
                }
                jobsWithoutIds.removeAll(jobsAssignedIdInThisRun);

                double prog = (numMRJobsCompl + calculateProgress(jc)) / totalMRJobs;
                if (notifyProgress(prog, lastProg)) {
                    List<Job> runnJobs = jc.getRunningJobs();
                    if (runnJobs != null) {
                        StringBuilder msg = new StringBuilder();
                        for (Object object : runnJobs) {
                            Job j = (Job) object;
                            if (j != null) {
                                msg.append(j.getAssignedJobID()).append(",");
                            }
                        }
                        if (msg.length() > 0) {
                            msg.setCharAt(msg.length() - 1, ']');
                            log.info("Running jobs are [" + msg);
                        }
                    }
                    lastProg = prog;
                }

                // collect job stats by frequently polling of completed jobs (PIG-1829)
                MRPigStatsUtil.accumulateStats(jc);

                // if stop_on_failure is enabled, we need to stop immediately when any job has failed
                checkStopOnFailure(stop_on_failure);
                // otherwise, we just display a warning message if there's any failure
                if (warn_failure && !jc.getFailedJobs().isEmpty()) {
                    // we don't warn again for this group of jobs
                    warn_failure = false;
                    log.warn("Ooops! Some job has failed! Specify -stop_on_failure if you "
                            + "want Pig to stop immediately on failure.");
                }
            }

            // check for the jobControlException first
            // if the job controller fails before launching the jobs then there are
            // no jobs to check for failure
            if (jobControlException != null) {
                if (jobControlException instanceof PigException) {
                    if (jobControlExceptionStackTrace != null) {
                        LogUtils.writeLog("Error message from job controller", jobControlExceptionStackTrace,
                                pc.getProperties().getProperty("pig.logfile"), log);
                    }
                    throw jobControlException;
                } else {
                    int errCode = 2117;
                    String msg = "Unexpected error when launching map reduce job.";
                    throw new ExecException(msg, errCode, PigException.BUG, jobControlException);
                }
            }

            if (!jc.getFailedJobs().isEmpty()) {
                // stop if stop_on_failure is enabled
                checkStopOnFailure(stop_on_failure);

                // If we only have one store and that job fail, then we sure
                // that the job completely fail, and we shall stop dependent jobs
                for (Job job : jc.getFailedJobs()) {
                    completeFailedJobsInThisRun.add(job);
                    log.info("job " + job.getAssignedJobID() + " has failed! Stop running all dependent jobs");
                }
                failedJobs.addAll(jc.getFailedJobs());
            }

            int removedMROp = jcc.updateMROpPlan(completeFailedJobsInThisRun);
            numMRJobsCompl += removedMROp;

            List<Job> jobs = jc.getSuccessfulJobs();
            jcc.moveResults(jobs);
            succJobs.addAll(jobs);

            // collecting final statistics
            MRPigStatsUtil.accumulateStats(jc);
        } catch (Exception e) {
            throw e;
        } finally {
            jc.stop();
        }
    }

    MRScriptState.get().emitProgressUpdatedNotification(100);
    log.info("100% complete");

    boolean failed = false;

    if (failedNativeMR.size() > 0) {
        failed = true;
    }

    if (Boolean.valueOf(pc.getProperties().getProperty(PigConfiguration.PIG_DELETE_TEMP_FILE, "true"))) {
        // Clean up all the intermediate data
        for (String path : intermediateVisitor.getIntermediate()) {
            // Skip non-file system paths such as hbase, see PIG-3617
            if (HadoopShims.hasFileSystemImpl(new Path(path), conf)) {
                FileLocalizer.delete(path, pc);
            }
        }
    }

    // Look to see if any jobs failed. If so, we need to report that.
    if (failedJobs != null && failedJobs.size() > 0) {
        Exception backendException = null;
        for (Job fj : failedJobs) {
            try {
                getStats(fj, true, pc);
            } catch (Exception e) {
                backendException = e;
            }
            List<POStore> sts = jcc.getStores(fj);
            for (POStore st : sts) {
                failureMap.put(st.getSFile(), backendException);
            }
            MRPigStatsUtil.setBackendException(fj, backendException);
        }
        failed = true;
    }

    // stats collection is done, log the results
    MRPigStatsUtil.stopCollection(true);

    // PigStatsUtil.stopCollection also computes the return code based on
    // total jobs to run, jobs successful and jobs failed
    failed = failed || !PigStats.get().isSuccessful();

    Map<Enum, Long> warningAggMap = new HashMap<Enum, Long>();

    if (succJobs != null) {
        for (Job job : succJobs) {
            List<POStore> sts = jcc.getStores(job);
            for (POStore st : sts) {
                if (Utils.isLocal(pc, job.getJobConf())) {
                    HadoopShims.storeSchemaForLocal(job, st);
                }

                if (!st.isTmpStore()) {
                    // create an "_SUCCESS" file in output location if
                    // output location is a filesystem dir
                    createSuccessFile(job, st);
                } else {
                    log.debug("Successfully stored result in: \"" + st.getSFile().getFileName() + "\"");
                }
            }

            getStats(job, false, pc);
            if (aggregateWarning) {
                computeWarningAggregate(job, warningAggMap);
            }
        }
    }

    if (aggregateWarning) {
        CompilationMessageCollector.logAggregate(warningAggMap, MessageType.Warning, log);
    }

    if (!failed) {
        log.info("Success!");
    } else {
        if (succJobs != null && succJobs.size() > 0) {
            log.info("Some jobs have failed! Stop running all dependent jobs");
        } else {
            log.info("Failed!");
        }
    }

    jcc.reset();

    int ret = failed
            ? ((succJobs != null && succJobs.size() > 0) ? ReturnCode.PARTIAL_FAILURE : ReturnCode.FAILURE)
            : ReturnCode.SUCCESS;

    PigStats pigStats = PigStatsUtil.getPigStats(ret);
    // run cleanup for all of the stores
    for (OutputStats output : pigStats.getOutputStats()) {
        POStore store = output.getPOStore();
        try {
            if (!output.isSuccessful()) {
                store.getStoreFunc().cleanupOnFailure(store.getSFile().getFileName(),
                        new org.apache.hadoop.mapreduce.Job(output.getConf()));
            } else {
                store.getStoreFunc().cleanupOnSuccess(store.getSFile().getFileName(),
                        new org.apache.hadoop.mapreduce.Job(output.getConf()));
            }
        } catch (IOException e) {
            throw new ExecException(e);
        } catch (AbstractMethodError nsme) {
            // Just swallow it. This means we're running against an
            // older instance of a StoreFunc that doesn't implement
            // this method.
        }
    }
    return pigStats;
}
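Unlike the Jetty example above, which swaps the loader on the current thread, this Pig launcher calls setContextClassLoader on a freshly constructed worker thread (jcThread) before start(), so everything the job-control thread runs resolves classes through Pig's loader and no restore step is needed. A minimal sketch of that variant; the WorkerLoaderDemo class is hypothetical and the empty URLClassLoader stands in for a real loader over job jars:

import java.net.URL;
import java.net.URLClassLoader;

public class WorkerLoaderDemo {
    public static void main(String[] args) {
        // Stand-in for a real loader over job jars (assumption for this sketch).
        ClassLoader jobLoader = new URLClassLoader(new URL[0]);
        Thread worker = new Thread(() -> {
            // The thread sees jobLoader as its context classloader for its whole lifetime.
            System.out.println("worker context loader: " + Thread.currentThread().getContextClassLoader());
        }, "JobControl");
        worker.setContextClassLoader(jobLoader); // must be set before start()
        worker.start();
    }
}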
From source file:com.gatf.generator.core.GatfTestGeneratorMojo.java
@SuppressWarnings("rawtypes")
public void execute() {
    if (!isEnabled()) {
        getLog().info("Skipping gatf-plugin execution....");
        return;
    }

    if (configFile != null) {
        try {
            InputStream io = new FileInputStream(configFile);
            XStream xstream = new XStream(new DomDriver("UTF-8"));
            xstream.processAnnotations(new Class[] { GatfConfiguration.class });
            xstream.alias("testPaths", String[].class);
            xstream.alias("testPath", String.class);
            xstream.alias("soapWsdlKeyPairs", String[].class);
            xstream.alias("soapWsdlKeyPair", String.class);
            xstream.alias("string", String.class);

            GatfConfiguration config = (GatfConfiguration) xstream.fromXML(io);

            setDebugEnabled(config.isDebugEnabled());
            setEnabled(config.isEnabled());
            setInDataType(config.getRequestDataType());
            setTestPaths(config.getTestPaths());
            setSoapWsdlKeyPairs(config.getSoapWsdlKeyPairs());
            setUrlPrefix(config.getUrlPrefix());
            setResourcepath(config.getResourcepath());
            setInDataType(config.getRequestDataType());
            setOutDataType(config.getResponseDataType());
            setOverrideSecure(config.isOverrideSecure());
            setUrlSuffix(config.getUrlSuffix());
            setUseSoapClient(config.isUseSoapClient());
            setTestCaseFormat(config.getTestCaseFormat());
            setPostmanCollectionVersion(config.getPostmanCollectionVersion());
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    if (getResourcepath() == null) {
        setResourcepath(".");
        getLog().info("No resource path specified, using the current working directory to generate testcases...");
    } else {
        File dir = new File(getResourcepath());
        if (!dir.exists()) {
            dir.mkdirs();
        }
    }

    Thread currentThread = Thread.currentThread();
    ClassLoader oldClassLoader = currentThread.getContextClassLoader();
    try {
        currentThread.setContextClassLoader(getClassLoader());
        getLog().info("Inside execute");
        List<Class> allClasses = new ArrayList<Class>();
        if (getTestPaths() != null) {
            for (String item : getTestPaths()) {
                if (item.endsWith(".*")) {
                    List<Class> classes = ClassLoaderUtils.getClasses(item.substring(0, item.indexOf(".*")));
                    if (classes != null && classes.size() > 0) {
                        allClasses.addAll(classes);
                        getLog().info("Adding package " + item);
                    } else {
                        getLog().error("Error:package not found - " + item);
                    }
                } else {
                    try {
                        allClasses.add(Thread.currentThread().getContextClassLoader().loadClass(item));
                        getLog().info("Adding class " + item);
                    } catch (Exception e) {
                        getLog().error("Error:class not found - " + item);
                    }
                }
            }
            if (!allClasses.isEmpty()) {
                generateRestTestCases(allClasses);
            }
        } else {
            getLog().info("Nothing to generate..");
        }
        generateSoapTestCases();
        getLog().info("Done execute");
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        currentThread.setContextClassLoader(oldClassLoader);
    }
}
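This plugin also shows the consumer side of the contract: once its own loader is installed, it resolves user-supplied class names through Thread.currentThread().getContextClassLoader().loadClass(...), so the lookup honours whatever loader the surrounding code arranged. A minimal sketch of that lookup; the TcclLookupDemo class and the class name are placeholders (in GATF the names come from the configured testPaths):

public class TcclLookupDemo {
    public static void main(String[] args) throws Exception {
        // Placeholder class name for illustration.
        String className = "com.example.UserTestCase";
        // Resolve against whatever context classloader this thread carries.
        Class<?> cls = Thread.currentThread().getContextClassLoader().loadClass(className);
        Object instance = cls.getDeclaredConstructor().newInstance();
        System.out.println("Loaded " + instance.getClass().getName());
    }
}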
From source file:com.meltmedia.cadmium.servlets.ClassLoaderLeakPreventor.java
/**
 * Since the Keep-Alive-Timer thread may have terminated, but still be referenced, we need to make sure it does not
 * reference this classloader.
 */
protected void unsetCachedKeepAliveTimer() {
    Object keepAliveCache = getStaticFieldValue("sun.net.www.http.HttpClient", "kac", true);
    if (keepAliveCache != null) {
        final Thread keepAliveTimer = getFieldValue(keepAliveCache, "keepAliveTimer");
        if (keepAliveTimer != null) {
            if (isWebAppClassLoaderOrChild(keepAliveTimer.getContextClassLoader())) {
                keepAliveTimer.setContextClassLoader(getWebApplicationClassLoader().getParent());
                error("ContextClassLoader of sun.net.www.http.HttpClient cached Keep-Alive-Timer set to parent instead");
            }
        }
    }
}
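This example uses setContextClassLoader for repair rather than scoping: when a long-lived JDK thread still references a web application's loader after undeploy, redirecting it at the parent loader lets the webapp loader be garbage collected. A generic sketch of the same check-and-replace idea; the LoaderLeakSweep class is hypothetical, and webAppLoader is assumed to be the loader being torn down:

public class LoaderLeakSweep {
    // Walks all live threads and redirects any that still carry 'webAppLoader'
    // to its parent, mirroring what unsetCachedKeepAliveTimer does for one thread.
    static void sweep(ClassLoader webAppLoader) {
        for (Thread t : Thread.getAllStackTraces().keySet()) {
            if (t.getContextClassLoader() == webAppLoader) {
                t.setContextClassLoader(webAppLoader.getParent());
            }
        }
    }
}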
From source file:org.codehaus.groovy.grails.orm.hibernate.cfg.GrailsAnnotationConfiguration.java
/**
 * Overrides the default behaviour to include binding of Grails domain classes.
 */
@Override
protected void secondPassCompile() throws MappingException {
    final Thread currentThread = Thread.currentThread();
    final ClassLoader originalContextLoader = currentThread.getContextClassLoader();

    if (!configLocked) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("[GrailsAnnotationConfiguration] [" + domainClasses.size()
                    + "] Grails domain classes to bind to persistence runtime");
        }

        // do Grails class configuration
        DefaultGrailsDomainConfiguration.configureDomainBinder(grailsApplication, domainClasses);

        for (GrailsDomainClass domainClass : domainClasses) {
            final String fullClassName = domainClass.getFullName();

            String hibernateConfig = fullClassName.replace('.', '/') + ".hbm.xml";
            final ClassLoader loader = originalContextLoader;
            // don't configure Hibernate mapped classes
            if (loader.getResource(hibernateConfig) != null)
                continue;

            final Mappings mappings = super.createMappings();
            if (!GrailsHibernateUtil.usesDatasource(domainClass, dataSourceName)) {
                continue;
            }

            if (LOG.isDebugEnabled()) {
                LOG.debug("[GrailsAnnotationConfiguration] Binding persistent class [" + fullClassName + "]");
            }

            Mapping m = GrailsDomainBinder.getMapping(domainClass);
            mappings.setAutoImport(m == null || m.getAutoImport());
            GrailsDomainBinder.bindClass(domainClass, mappings, sessionFactoryBeanName);
        }
    }

    try {
        currentThread.setContextClassLoader(grailsApplication.getClassLoader());
        super.secondPassCompile();
    } finally {
        currentThread.setContextClassLoader(originalContextLoader);
    }
    configLocked = true;
}
From source file:org.grails.orm.hibernate.cfg.GrailsAnnotationConfiguration.java
/**
 * Overrides the default behaviour to include binding of Grails domain classes.
 */
@Override
protected void secondPassCompile() throws MappingException {
    final Thread currentThread = Thread.currentThread();
    final ClassLoader originalContextLoader = currentThread.getContextClassLoader();

    if (!configLocked) {
        if (LOG.isDebugEnabled())
            LOG.debug("[GrailsAnnotationConfiguration] [" + domainClasses.size()
                    + "] Grails domain classes to bind to persistence runtime");

        // do Grails class configuration
        DefaultGrailsDomainConfiguration.configureDomainBinder(grailsApplication, domainClasses);

        for (GrailsDomainClass domainClass : domainClasses) {
            final String fullClassName = domainClass.getFullName();

            String hibernateConfig = fullClassName.replace('.', '/') + ".hbm.xml";
            final ClassLoader loader = originalContextLoader;
            // don't configure Hibernate mapped classes
            if (loader.getResource(hibernateConfig) != null)
                continue;

            final Mappings mappings = super.createMappings();
            if (!GrailsHibernateUtil.usesDatasource(domainClass, dataSourceName)) {
                continue;
            }

            LOG.debug("[GrailsAnnotationConfiguration] Binding persistent class [" + fullClassName + "]");

            Mapping m = binder.getMapping(domainClass);
            mappings.setAutoImport(m == null || m.getAutoImport());
            binder.bindClass(domainClass, mappings, sessionFactoryBeanName);
        }
    }

    try {
        currentThread.setContextClassLoader(grailsApplication.getClassLoader());
        super.secondPassCompile();
        createSubclassForeignKeys();
    } finally {
        currentThread.setContextClassLoader(originalContextLoader);
    }
    configLocked = true;
}
From source file:org.codehaus.groovy.ant.Groovy.java
/**
 * Exec the statement.
 *
 * @param txt the groovy source to exec
 * @param out not used?
 */
protected void execGroovy(final String txt, final PrintStream out) {
    log.debug("execGroovy()");

    // Check and ignore empty statements
    if ("".equals(txt.trim())) {
        return;
    }

    log.verbose("Script: " + txt);
    if (classpath != null) {
        log.debug("Explicit Classpath: " + classpath.toString());
    }

    if (fork) {
        log.debug("Using fork mode");
        try {
            createClasspathParts();
            createNewArgs(txt);
            super.setFork(fork);
            super.setClassname(useGroovyShell ? "groovy.lang.GroovyShell" : "org.codehaus.groovy.ant.Groovy");
            configureCompiler();
            super.execute();
        } catch (Exception e) {
            StringWriter writer = new StringWriter();
            new ErrorReporter(e, false).write(new PrintWriter(writer));
            String message = writer.toString();
            throw new BuildException("Script Failed: " + message, e, getLocation());
        }
        return;
    }

    Object mavenPom = null;
    final Project project = getProject();
    final ClassLoader baseClassLoader;
    ClassLoader savedLoader = null;
    final Thread thread = Thread.currentThread();
    boolean maven = "org.apache.commons.grant.GrantProject".equals(project.getClass().getName());
    // treat the case where Ant is run through Maven
    if (maven) {
        if (contextClassLoader) {
            throw new BuildException("Using setContextClassLoader not permitted when using Maven.", getLocation());
        }
        try {
            final Object propsHandler = project.getClass().getMethod("getPropsHandler").invoke(project);
            final Field contextField = propsHandler.getClass().getDeclaredField("context");
            contextField.setAccessible(true);
            final Object context = contextField.get(propsHandler);
            mavenPom = InvokerHelper.invokeMethod(context, "getProject", new Object[0]);
        } catch (Exception e) {
            throw new BuildException("Impossible to retrieve Maven's Ant project: " + e.getMessage(), getLocation());
        }
        // load groovy into "root.maven" classloader instead of "root" so that
        // groovy script can access Maven classes
        baseClassLoader = mavenPom.getClass().getClassLoader();
    } else {
        baseClassLoader = GroovyShell.class.getClassLoader();
    }

    if (contextClassLoader || maven) {
        savedLoader = thread.getContextClassLoader();
        thread.setContextClassLoader(GroovyShell.class.getClassLoader());
    }

    final String scriptName = computeScriptName();
    final GroovyClassLoader classLoader = new GroovyClassLoader(baseClassLoader);
    addClassPathes(classLoader);
    configureCompiler();

    final GroovyShell groovy = new GroovyShell(classLoader, new Binding(), configuration);
    try {
        parseAndRunScript(groovy, txt, mavenPom, scriptName, null, new AntBuilder(this));
    } finally {
        groovy.resetLoadedClasses();
        groovy.getClassLoader().clearCache();
        if (contextClassLoader || maven)
            thread.setContextClassLoader(savedLoader);
    }
}
From source file:net.lightbody.bmp.proxy.jetty.jetty.servlet.WebApplicationContext.java
/**
 * Stop the web application.
 * Handlers for resource, servlet, filter and security are removed
 * as they are recreated and configured by any subsequent call to start().
 * @exception InterruptedException
 */
protected void doStop() throws Exception {
    MultiException mex = new MultiException();

    Thread thread = Thread.currentThread();
    ClassLoader lastContextLoader = thread.getContextClassLoader();

    try {
        // Context listeners
        if (_contextListeners != null) {
            if (_webAppHandler != null) {
                ServletContextEvent event = new ServletContextEvent(getServletContext());
                for (int i = LazyList.size(_contextListeners); i-- > 0;) {
                    try {
                        ((ServletContextListener) LazyList.get(_contextListeners, i)).contextDestroyed(event);
                    } catch (Exception e) {
                        mex.add(e);
                    }
                }
            }
        }
        _contextListeners = null;

        // Stop the context
        try {
            super.doStop();
        } catch (Exception e) {
            mex.add(e);
        }

        // clean up
        clearSecurityConstraints();

        if (_webAppHandler != null)
            removeHandler(_webAppHandler);
        _webAppHandler = null;

        if (_errorPages != null)
            _errorPages.clear();
        _errorPages = null;

        _webApp = null;
        _webInf = null;

        _configurations = null;
    } finally {
        thread.setContextClassLoader(lastContextLoader);
    }

    if (mex != null)
        mex.ifExceptionThrow();
}
From source file:net.lightbody.bmp.proxy.jetty.http.HttpContext.java
protected synchronized void doStart() throws Exception {
    if (isStarted())
        return;

    // fail fast before dereferencing _httpServer below
    if (_httpServer == null)
        throw new IllegalStateException("No server for " + this);

    if (_httpServer.getServerClasses() != null)
        _serverClasses = _httpServer.getServerClasses();
    if (_httpServer.getSystemClasses() != null)
        _systemClasses = _httpServer.getSystemClasses();

    _resources.start();

    statsReset();

    // start the context itself
    _resources.getMimeMap();
    _resources.getEncodingMap();

    // Setup realm
    if (_userRealm == null && _authenticator != null) {
        _userRealm = _httpServer.getRealm(_realmName);
        if (_userRealm == null)
            log.warn("No Realm: " + _realmName);
    }

    // setup the context loader
    initClassLoader(false);

    // Set attribute if needed
    String attr = getInitParameter(__fileClassPathAttr);
    if (attr != null && attr.length() > 0)
        setAttribute(attr, getFileClassPath());

    // Start the handlers
    Thread thread = Thread.currentThread();
    ClassLoader lastContextLoader = thread.getContextClassLoader();
    try {
        if (_loader != null)
            thread.setContextClassLoader(_loader);
        if (_requestLog != null)
            _requestLog.start();
        startHandlers();
    } finally {
        thread.setContextClassLoader(lastContextLoader);
        getHandlers();
    }
}
From source file:net.lightbody.bmp.proxy.jetty.http.HttpContext.java
/** Stop the context. */
protected void doStop() throws Exception {
    if (_httpServer == null)
        throw new InterruptedException("Destroy called");

    synchronized (this) {
        // Notify the container for the stop
        Thread thread = Thread.currentThread();
        ClassLoader lastContextLoader = thread.getContextClassLoader();
        try {
            if (_loader != null)
                thread.setContextClassLoader(_loader);
            Iterator handlers = _handlers.iterator();
            while (handlers.hasNext()) {
                HttpHandler handler = (HttpHandler) handlers.next();
                if (handler.isStarted()) {
                    try {
                        handler.stop();
                    } catch (Exception e) {
                        log.warn(LogSupport.EXCEPTION, e);
                    }
                }
            }
            if (_requestLog != null)
                _requestLog.stop();
        } finally {
            thread.setContextClassLoader(lastContextLoader);
        }

        // TODO this is a poor test
        if (_loader instanceof ContextLoader) {
            ((ContextLoader) _loader).destroy();
            LogFactory.release(_loader);
        }

        _loader = null;
    }
    _resources.flushCache();
    _resources.stop();
}