List of usage examples for java.lang Thread setContextClassLoader
public void setContextClassLoader(ClassLoader cl)
From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java
/**
 * Invokes each registered RegionObserver's {@code preScannerNext} hook in order,
 * swapping in the coprocessor's class loader around each call.
 *
 * @param s the scanner
 * @param results the result set returned by the region server
 * @param limit the maximum number of results to return
 * @return 'has next' indication to client if bypassing default behavior, or
 *         null otherwise
 * @exception IOException Exception
 */
public Boolean preScannerNext(final InternalScanner s, final List<Result> results, final int limit)
        throws IOException {
    boolean bypass = false;
    boolean hasNext = false;
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            // Save the caller's context class loader so it can be restored even if
            // the observer throws; the observer runs under its own loader.
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                hasNext = ((RegionObserver) env.getInstance()).preScannerNext(ctx, s, results, limit,
                        hasNext);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            bypass |= ctx.shouldBypass();
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    // Only meaningful to the client when some observer requested bypass.
    return bypass ? hasNext : null;
}
From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java
/** * @param fs fileystem to read from/*from w ww .j a v a 2 s .c om*/ * @param p path to the file * @param in {@link FSDataInputStreamWrapper} * @param size Full size of the file * @param cacheConf * @param r original reference file. This will be not null only when reading a split file. * @return a Reader instance to use instead of the base reader if overriding * default behavior, null otherwise * @throws IOException */ public StoreFile.Reader preStoreFileReaderOpen(final FileSystem fs, final Path p, final FSDataInputStreamWrapper in, final long size, final CacheConfig cacheConf, final Reference r) throws IOException { StoreFile.Reader reader = null; ObserverContext<RegionCoprocessorEnvironment> ctx = null; for (RegionEnvironment env : coprocessors) { if (env.getInstance() instanceof RegionObserver) { ctx = ObserverContext.createAndPrepare(env, ctx); Thread currentThread = Thread.currentThread(); ClassLoader cl = currentThread.getContextClassLoader(); try { currentThread.setContextClassLoader(env.getClassLoader()); reader = ((RegionObserver) env.getInstance()).preStoreFileReaderOpen(ctx, fs, p, in, size, cacheConf, r, reader); } catch (Throwable e) { handleCoprocessorThrowable(env, e); } finally { currentThread.setContextClassLoader(cl); } if (ctx.shouldComplete()) { break; } } } return reader; }
From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java
/**
 * Invokes each registered RegionObserver's {@code preCheckAndPut} hook in order,
 * threading the previous observer's result into the next call.
 *
 * @param row row to check
 * @param family column family
 * @param qualifier column qualifier
 * @param compareOp the comparison operation
 * @param comparator the comparator
 * @param put data to put if check succeeds
 * @return true or false to return to client if default processing should
 *         be bypassed, or null otherwise
 * @throws IOException e
 */
public Boolean preCheckAndPut(final byte[] row, final byte[] family, final byte[] qualifier,
        final CompareOp compareOp, final ByteArrayComparable comparator, final Put put)
        throws IOException {
    boolean bypass = false;
    boolean result = false;
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            // Save the caller's context class loader; restored in finally.
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                result = ((RegionObserver) env.getInstance()).preCheckAndPut(ctx, row, family,
                        qualifier, compareOp, comparator, put, result);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            bypass |= ctx.shouldBypass();
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    // Only meaningful to the client when some observer requested bypass.
    return bypass ? result : null;
}
From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java
/**
 * Invokes each registered RegionObserver's {@code preCheckAndDelete} hook in order,
 * threading the previous observer's result into the next call.
 *
 * @param row row to check
 * @param family column family
 * @param qualifier column qualifier
 * @param compareOp the comparison operation
 * @param comparator the comparator
 * @param delete delete to commit if check succeeds
 * @return true or false to return to client if default processing should
 *         be bypassed, or null otherwise
 * @throws IOException e
 */
public Boolean preCheckAndDelete(final byte[] row, final byte[] family, final byte[] qualifier,
        final CompareOp compareOp, final ByteArrayComparable comparator, final Delete delete)
        throws IOException {
    boolean bypass = false;
    boolean result = false;
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            // Save the caller's context class loader; restored in finally.
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                result = ((RegionObserver) env.getInstance()).preCheckAndDelete(ctx, row, family,
                        qualifier, compareOp, comparator, delete, result);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            bypass |= ctx.shouldBypass();
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    // Only meaningful to the client when some observer requested bypass.
    return bypass ? result : null;
}
From source file:org.jboss.confluence.plugin.docbook_tools.docbookimport.DocbookImporter.java
/** * Process XSLT transformation./*from www .ja v a2 s. c o m*/ * * @param xsltTemplate input stream with XSLT template file used to transform (closed inside this method) * @param xmlToTransform input stream with XML file to transform (closed inside this method) * @param xmlToTransformURL URL of <code>xmlToTransform</code> file (may be <code>file://</code> too). We need it to * correctly evaluate relative paths. * @param output stream to write transformed output to * @throws javax.xml.transform.TransformerException */ protected void processXslt(final InputStream xsltTemplate, final InputStream xmlToTransform, final String xmlToTransformURL, final OutputStream output) throws Exception { final XSLTErrorListener errorListener = new XSLTErrorListener(); final SAXErrorHandler eh = new SAXErrorHandler(); Thread th = new Thread(new Runnable() { public void run() { try { org.xml.sax.InputSource xmlSource = new org.xml.sax.InputSource(xmlToTransform); xmlSource.setSystemId(xmlToTransformURL); javax.xml.transform.Source xsltSource = new javax.xml.transform.stream.StreamSource( xsltTemplate); javax.xml.transform.Result result = new javax.xml.transform.stream.StreamResult(output); // prepare XInclude aware parser which resolves necessary entities correctly XMLReader reader = new ParserAdapter(saxParserFactory.newSAXParser().getParser()); reader.setEntityResolver(new JDGEntityResolver(reader.getEntityResolver())); reader.setErrorHandler(eh); SAXSource xmlSAXSource = new SAXSource(reader, xmlSource); javax.xml.transform.Transformer trans = transformerFact.newTransformer(xsltSource); trans.setErrorListener(errorListener); trans.transform(xmlSAXSource, result); } catch (Exception e) { if (e instanceof TransformerException) { errorListener.setException((TransformerException) e); } else { errorListener.setException(new TransformerException(e)); } } finally { FileUtils.closeInputStream(xmlToTransform); FileUtils.closeInputStream(xsltTemplate); } } }); th.setName("DocbookImporter 
XSLT transformation thread"); th.setDaemon(true); th.setContextClassLoader(DocbookImporter.class.getClassLoader()); th.start(); th.join(); if (eh.getException() != null) { throw eh.getException(); } if (errorListener.getException() != null) { throw errorListener.getException(); } }
From source file:net.lightbody.bmp.proxy.jetty.http.HttpContext.java
/** Enter the context scope. * This method is called (by handle or servlet dispatchers) to indicate that * request handling is entering the scope of this context. The opaque scope object * returned, should be passed to the leaveContextScope method. *//*from w ww. j a va2s . c o m*/ public Object enterContextScope(HttpRequest request, HttpResponse response) { // Save the thread context loader Thread thread = Thread.currentThread(); ClassLoader cl = thread.getContextClassLoader(); HttpContext c = response.getHttpContext(); Scope scope = null; if (cl != HttpContext.class.getClassLoader() || c != null) { scope = new Scope(); scope._classLoader = cl; scope._httpContext = c; } if (_loader != null) thread.setContextClassLoader(_loader); response.setHttpContext(this); return scope; }
From source file:com.baidu.jprotobuf.mojo.PreCompileMojo.java
/** * Execute goal.//from w w w . j a v a 2s . co m * * @throws MojoExecutionException execution of the main class or one of the threads it generated failed. * @throws MojoFailureException something bad happened... */ public void execute() throws MojoExecutionException, MojoFailureException { if (isSkip()) { getLog().info("skipping execute as per configuraion"); return; } if (killAfter != -1) { getLog().warn("Warning: killAfter is now deprecated. Do you need it ? Please comment on MEXEC-6."); } arguments = new String[] { outputParentDirectory.getAbsolutePath(), outputDirectory.getAbsolutePath(), filterClassPackage }; if (getLog().isDebugEnabled()) { StringBuffer msg = new StringBuffer("Invoking : "); msg.append(mainClass); msg.append(".main("); for (int i = 0; i < arguments.length; i++) { if (i > 0) { msg.append(", "); } msg.append(arguments[i]); } msg.append(")"); getLog().debug(msg); } final Log log = getLog(); IsolatedThreadGroup threadGroup = new IsolatedThreadGroup(mainClass /* name */); Thread bootstrapThread = new Thread(threadGroup, new Runnable() { public void run() { long current = System.currentTimeMillis(); try { Method main = Thread.currentThread().getContextClassLoader().loadClass(mainClass) .getMethod("main", new Class[] { String[].class }); if (!main.isAccessible()) { getLog().debug("Setting accessibility to true in order to invoke main()."); main.setAccessible(true); } if (!Modifier.isStatic(main.getModifiers())) { throw new MojoExecutionException( "Can't call main(String[])-method because it is not static."); } main.invoke(null, new Object[] { arguments }); } catch (NoSuchMethodException e) { // just pass it on Thread.currentThread().getThreadGroup().uncaughtException(Thread.currentThread(), new Exception( "The specified mainClass doesn't contain a main method with appropriate signature.", e)); } catch (Exception e) { // just pass it on Thread.currentThread().getThreadGroup().uncaughtException(Thread.currentThread(), e); } finally { 
log.info("JProtobuf pre compile done time took: " + (System.currentTimeMillis() - current) + "ms"); } } }, mainClass + ".main()"); bootstrapThread.setContextClassLoader(getClassLoader()); setSystemProperties(); bootstrapThread.start(); joinNonDaemonThreads(threadGroup); // It's plausible that spontaneously a non-daemon thread might be created as we try and shut down, // but it's too late since the termination condition (only daemon threads) has been triggered. if (keepAlive) { getLog().warn( "Warning: keepAlive is now deprecated and obsolete. Do you need it? Please comment on MEXEC-6."); waitFor(0); } if (cleanupDaemonThreads) { terminateThreads(threadGroup); try { threadGroup.destroy(); } catch (IllegalThreadStateException e) { getLog().warn("Couldn't destroy threadgroup " + threadGroup, e); } } if (originalSystemProperties != null) { System.setProperties(originalSystemProperties); } synchronized (threadGroup) { if (threadGroup.uncaughtException != null) { throw new MojoExecutionException("An exception occured while executing the Java class. " + threadGroup.uncaughtException.getMessage(), threadGroup.uncaughtException); } } registerSourceRoots(); }
From source file:org.openspaces.admin.internal.admin.DefaultAdmin.java
/**
 * Creates a scheduled thread pool whose worker threads inherit the context class
 * loader of the thread that called this method (rather than the loader active
 * when each worker is lazily spawned).
 *
 * @param threadName base name given to pool threads via {@code GSThreadFactory}
 * @param numberOfThreads core pool size
 * @return the configured scheduled executor
 */
private ScheduledThreadPoolExecutor createScheduledThreadPoolExecutor(String threadName,
        int numberOfThreads) {
    // Capture the creator's loader now; worker threads are created later, possibly
    // from a different context.
    final ClassLoader correctClassLoader = Thread.currentThread().getContextClassLoader();
    ScheduledThreadPoolExecutor executorService = (ScheduledThreadPoolExecutor) Executors
            .newScheduledThreadPool(numberOfThreads, new GSThreadFactory(threadName, useDaemonThreads) {
                @Override
                public Thread newThread(Runnable r) {
                    Thread thread = super.newThread(r);
                    thread.setContextClassLoader(correctClassLoader);
                    return thread;
                }
            });
    return executorService;
}
From source file:org.openspaces.admin.internal.admin.DefaultAdmin.java
/**
 * Creates a fixed-size thread pool whose worker threads inherit the context class
 * loader of the thread that called this method, optionally recording the id of the
 * (single) worker thread for later identification.
 *
 * @param threadName base name given to pool threads via {@code GSThreadFactory}
 * @param numberOfThreads fixed pool size
 * @param updateSingleThreadId when true, stores each new worker's id in
 *        {@code executorSingleThreadId} (meaningful for single-threaded pools)
 * @return the configured executor with the default rejected-execution handler applied
 */
private ThreadPoolExecutor createThreadPoolExecutor(String threadName, int numberOfThreads,
        final boolean updateSingleThreadId) {
    // Capture the creator's loader now; worker threads are created later, possibly
    // from a different context.
    final ClassLoader correctClassLoader = Thread.currentThread().getContextClassLoader();
    ThreadPoolExecutor executorService = (ThreadPoolExecutor) Executors.newFixedThreadPool(numberOfThreads,
            new GSThreadFactory(threadName, useDaemonThreads) {
                @Override
                public Thread newThread(Runnable r) {
                    Thread thread = super.newThread(r);
                    thread.setContextClassLoader(correctClassLoader);
                    if (updateSingleThreadId) {
                        DefaultAdmin.this.executorSingleThreadId = thread.getId();
                    }
                    return thread;
                }
            });
    executorService.setRejectedExecutionHandler(DEFAULT_EVENT_LISTENER_REJECTED_POLICY);
    return executorService;
}
From source file:architecture.ee.plugin.impl.PluginManagerImpl.java
public List loadPlugin(File pluginDir, PluginEntityObject pluginDbBean) throws PluginException { if (!ApplicationHelper.isSetupComplete()) { return Collections.emptyList(); }//from w ww.j ava 2s . c o m log.debug((new StringBuilder()).append("Loading action from: ").append(pluginDir.getName()).toString()); Document pluginXML; try { pluginXML = PluginUtils.getPluginConfiguration(pluginDir); } catch (DocumentException e) { pluginXML = null; } if (pluginXML == null) { String msg = (new StringBuilder()).append("Plugin ").append(pluginDir.getName()) .append(" could not be loaded: no plugin.xml file found").toString(); log.error(msg); brokenPlugins.put(pluginDir.getName(), "No plugin.xml found."); throw new PluginException(msg); } ArrayList results = Lists.newArrayList(); String pluginName; PluginClassLoader pluginLoader; Node pluginNameNode = pluginXML.selectSingleNode("/plugin/name"); pluginName = pluginNameNode.getText(); isValidVersion(pluginName, pluginXML, pluginDir); pluginLoader = getPluginClassloader(pluginName, pluginDir); if (pluginLoader == null) { return Collections.emptyList(); } pluginLoader.initialize(); log.debug("Plugin classloader urls:" + pluginLoader.getURLS()); Plugin plugin; PluginMetaDataImpl metaData; ConfigurationContext context; Thread currentThread; ClassLoader oldLoader; Node classNode = pluginXML.selectSingleNode("/plugin/class"); if (classNode != null) { String className = classNode.getText(); try { log.debug("Plugin class:" + className); plugin = (Plugin) pluginLoader.loadClass(className).newInstance(); log.debug("Plugin object:" + plugin); log.debug("******************************** "); } catch (Throwable e) { brokenPlugins.put(pluginDir.getName(), "Failed to configure class loader."); log.debug(e); throw new PluginException(e); } } else { plugin = new DummyPlugin(pluginName); } log.debug("===============================1============"); metaData = new PluginMetaDataImpl(plugin, pluginLoader, this, pluginXML, pluginDir); 
log.debug("=========================2=================="); metaData.setPluginDbBean(pluginDbBean); log.debug("=======================3===================="); registerPlugin(plugin, pluginDir); log.debug("=======================4===================="); pluginMeta.put(pluginName, metaData); log.debug("======================5====================="); pluginMeta.put(plugin, metaData); log.debug("====================6======================="); context = new ConfigurationContext(metaData); log.debug("=======================7===================="); currentThread = Thread.currentThread(); oldLoader = currentThread.getContextClassLoader(); log.debug("==========================================="); try { currentThread.setContextClassLoader(pluginLoader.getClassLoader()); log.debug("Plugin configures:" + configurators.size()); for (PluginConfigurator configurator : configurators) { log.debug("Plugin configure:" + configurator.getClass().getName()); configurator.configure(context); } } catch (Exception e) { brokenPlugins.put(pluginDir.getName(), "Failed to configure class loader."); throw new PluginException(e); } finally { if (oldLoader != null) currentThread.setContextClassLoader(oldLoader); } log.debug("==========================================="); int pluginDbVersion = getPluginDatabaseVersion(metaData); boolean init = true; if (pluginDbVersion > 0 && metaData.getDatabaseVersion() != pluginDbVersion) { brokenPlugins.put(pluginDir.getName(), (new StringBuilder()).append("Database version mismatches plugin version. 
Current: ") .append(pluginDbVersion).append(", Required: ").append(metaData.getDatabaseVersion()) .toString()); init = false; } if (init) { try { plugin.init(); firePluginCreatedEvent(pluginDir.getName(), plugin); } catch (IncompatibleClassChangeError e) { log.error((new StringBuilder()).append("Unable to initialize plugin, plugin ").append(pluginName) .append(" binds to an old class version needs to be required.").toString()); results.add(PluginRequiresRebuildResult.getPluginRequiresRebuildResult()); brokenPlugins.put(pluginDir.getName(), "Failed to initialize."); } results.addAll(context.getResults()); ChainingClassLoader.clearCache(); } return results; }