Example usage for java.lang Thread setContextClassLoader

List of usage examples for java.lang Thread setContextClassLoader

Introduction

In this page you can find the example usage for java.lang Thread setContextClassLoader.

Prototype

public void setContextClassLoader(ClassLoader cl) 

Source Link

Document

Sets the context ClassLoader for this Thread.

Usage

From source file:org.opencastproject.userdirectory.ldap.LdapUserProviderInstance.java

/**
 * Loads a user from LDAP./*  www. j a  v  a2s  .c o  m*/
 * 
 * @param userName
 *          the username
 * @return the user
 */
protected User loadUserFromLdap(String userName) {
    if (delegate == null || cache == null) {
        throw new IllegalStateException("The LDAP user detail service has not yet been configured");
    }
    ldapLoads.incrementAndGet();
    UserDetails userDetails = null;

    Thread currentThread = Thread.currentThread();
    ClassLoader originalClassloader = currentThread.getContextClassLoader();
    try {
        currentThread.setContextClassLoader(LdapUserProviderFactory.class.getClassLoader());
        try {
            userDetails = delegate.loadUserByUsername(userName);
        } catch (UsernameNotFoundException e) {
            cache.put(userName, nullToken);
            return null;
        }

        Collection<? extends GrantedAuthority> authorities = userDetails.getAuthorities();
        String[] roles = null;
        if (authorities != null) {
            int i = 0;
            roles = new String[authorities.size()];
            for (GrantedAuthority authority : authorities) {
                String role = authority.getAuthority();
                roles[i++] = role;
            }
        }
        User user = new User(userDetails.getUsername(), getOrganization(), roles);
        cache.put(userName, user);
        return user;
    } finally {
        currentThread.setContextClassLoader(originalClassloader);
    }
}

From source file:com.mg.jet.birt.report.data.oda.ejbql.HibernateUtil.java

/**
 * Lazily initializes the shared Hibernate {@code SessionFactory}.
 * <p>
 * First tries to obtain a container-managed factory via JNDI; if that fails,
 * builds one locally from the given configuration file and mapping directory,
 * temporarily swapping the thread context classloader so Hibernate and the
 * JDBC driver resolve against the plugin's classpath. No-op when the factory
 * already exists.
 *
 * @param hibfile  path to the Hibernate configuration file
 * @param mapdir   directory containing the Hibernate mapping files
 * @param jndiName JNDI name of a pre-built SessionFactory; falls back to
 *                 {@code CommonConstant.DEFAULT_JNDI_URL} when null or blank
 * @throws HibernateException if no session factory could be created
 */
private static synchronized void initSessionFactory(String hibfile, String mapdir, String jndiName)
        throws HibernateException {

    if (sessionFactory == null) {

        if (jndiName == null || jndiName.trim().length() == 0)
            jndiName = CommonConstant.DEFAULT_JNDI_URL;
        Context initCtx = null;
        try {
            // Prefer a container-provided factory bound in JNDI.
            initCtx = new InitialContext();
            sessionFactory = (SessionFactory) initCtx.lookup(jndiName);
            return;
        } catch (Exception e) {
            // Not fatal: fall through and build the factory locally.
            logger.log(Level.INFO, "Unable to get JNDI data source connection", e);
        } finally {
            if (initCtx != null)
                try {
                    initCtx.close();
                } catch (NamingException e) {
                    // ignore — close failure does not affect the outcome
                }
        }

        Thread thread = Thread.currentThread();
        try {
            // Remember the current context classloader; it is restored in the
            // finally block below. NOTE(review): oldloader is a static field,
            // only assigned inside this try — if an earlier statement threw,
            // the finally would restore a stale value; confirm intent.
            oldloader = thread.getContextClassLoader();
            // Classloader that sees the plugin's collected jar URLs (URLList)
            // in addition to the plugin classloader itself.
            ClassLoader testLoader = new URLClassLoader((URL[]) URLList.toArray(new URL[0]), pluginLoader);

            thread.setContextClassLoader(testLoader);
            Configuration cfg = new Configuration();
            buildConfig(hibfile, mapdir, cfg);

            // Load the JDBC driver through the plugin-aware classloader and wrap
            // it — presumably so DriverManager accepts a driver that the system
            // classloader cannot see; TODO confirm WrappedDriver's contract.
            Class<? extends Driver> driverClass = testLoader
                    .loadClass(cfg.getProperty("connection.driver_class")).asSubclass(Driver.class);
            Driver driver = driverClass.newInstance();
            WrappedDriver wd = new WrappedDriver(driver, cfg.getProperty("connection.driver_class"));

            // Avoid registering the same wrapped driver twice.
            boolean foundDriver = false;
            Enumeration<Driver> drivers = DriverManager.getDrivers();
            while (drivers.hasMoreElements()) {
                Driver nextDriver = (Driver) drivers.nextElement();
                if (nextDriver.getClass() == wd.getClass()) {
                    if (nextDriver.toString().equals(wd.toString())) {
                        foundDriver = true;
                        break;
                    }
                }
            }
            if (!foundDriver) {

                DriverManager.registerDriver(wd);
            }

            sessionFactory = cfg.buildSessionFactory();
            // Remember the inputs that produced this factory.
            HibernateMapDirectory = mapdir;
            HibernateConfigFile = hibfile;
        } catch (Throwable e) {
            e.printStackTrace();
            throw new HibernateException("No Session Factory Created " + e.getLocalizedMessage(), e);
        } finally {
            thread.setContextClassLoader(oldloader);
        }
    }
}

From source file:org.sakaiproject.kernel.persistence.eclipselink.EntityManagerFactoryProvider.java

/**
 * Construct an EclipseLink entity manager provider.
 * <p>
 * Builds the EclipseLink configuration (RESOURCE_LOCAL transactions, provided
 * data source, logging, DDL generation) and creates the
 * {@code EntityManagerFactory} under a {@code PersistenceUnitClassLoader} so
 * persistence resources resolve against this bundle. The thread context
 * classloader swap is wrapped in try/finally so it is always restored, even
 * when factory creation fails (previously an exception leaked the swapped
 * classloader onto the calling thread).
 *
 * @param dataSourceService supplies the data source registered with EclipseLink
 * @param minRead           minimum read connections (only used by the
 *                          commented-out internal-pool configuration)
 * @param minWrite          minimum write connections (likewise unused here)
 * @param unitName          persistence unit name to create
 * @param driverClassName   JDBC driver (unused here; kept for the internal pool)
 * @param url               JDBC URL (unused here)
 * @param username          JDBC username (unused here)
 * @param password          JDBC password (unused here)
 */
@Inject
@SuppressWarnings(value = {
        "DP_CREATE_CLASSLOADER_INSIDE_DO_PRIVILEGED" }, justification = "Expected to only ever be executed from a privalaged environment")
public EntityManagerFactoryProvider(DataSourceService dataSourceService,
        @Named(KernelConstants.DB_MIN_NUM_READ) String minRead,
        @Named(KernelConstants.DB_MIN_WRITE) String minWrite,
        @Named(KernelConstants.DB_UNITNAME) String unitName,
        @Named(KernelConstants.JDBC_DRIVER_NAME) String driverClassName,
        @Named(KernelConstants.JDBC_URL) String url, @Named(KernelConstants.JDBC_USERNAME) String username,
        @Named(KernelConstants.JDBC_PASSWORD) String password) {

    Map<String, Object> properties = new HashMap<String, Object>();

    // Ensure RESOURCE_LOCAL transactions is used.
    properties.put(TRANSACTION_TYPE, PersistenceUnitTransactionType.RESOURCE_LOCAL.name());

    LOG.info("Using provided data source");
    properties.put(dataSourceService.getType(), dataSourceService.getDataSource());

    // Configure the internal EclipseLink connection pool
    // LOG.info("Creating internal data source");
    // properties.put(PersistenceUnitProperties.JDBC_DRIVER, driverClassName);
    // properties.put(PersistenceUnitProperties.JDBC_URL, url);
    // properties.put(PersistenceUnitProperties.JDBC_USER, username);
    // properties.put(PersistenceUnitProperties.JDBC_PASSWORD, password);
    // properties
    // .put(PersistenceUnitProperties.JDBC_READ_CONNECTIONS_MIN, minRead);
    // properties.put(PersistenceUnitProperties.JDBC_WRITE_CONNECTIONS_MIN,
    // minWrite);

    // Configure logging. FINE ensures all SQL is shown
    properties.put(LOGGING_LEVEL, (debug ? "FINE" : "INFO"));
    properties.put(LOGGING_TIMESTAMP, "true");
    properties.put(LOGGING_THREAD, "true");
    properties.put(LOGGING_SESSION, "true");

    // Ensure that no server-platform is configured
    properties.put(TARGET_SERVER, TargetServer.None);

    properties.put(PersistenceUnitProperties.DDL_GENERATION, PersistenceUnitProperties.CREATE_ONLY);
    properties.put(PersistenceUnitProperties.DROP_JDBC_DDL_FILE, "drop.sql");
    properties.put(PersistenceUnitProperties.CREATE_JDBC_DDL_FILE, "create.sql");
    properties.put(PersistenceUnitProperties.DDL_GENERATION_MODE,
            PersistenceUnitProperties.DDL_BOTH_GENERATION);

    // properties.put(PersistenceUnitProperties.SESSION_CUSTOMIZER,
    // EnableIntegrityChecker.class.getName());

    LOG.info("Starting connection manager with properties " + properties);
    final Thread currentThread = Thread.currentThread();
    final ClassLoader saveClassLoader = currentThread.getContextClassLoader();

    PersistenceUnitClassLoader persistenceCL = new PersistenceUnitClassLoader(this.getClass().getClassLoader());
    currentThread.setContextClassLoader(persistenceCL);
    try {
        entityManagerFactory = Persistence.createEntityManagerFactory(unitName, properties);
    } finally {
        // Always restore the caller's context classloader (bug fix: the restore
        // was previously skipped when createEntityManagerFactory threw).
        currentThread.setContextClassLoader(saveClassLoader);
    }

}

From source file:com.freetmp.common.util.ClassUtils.java

/**
 * Installs the given ClassLoader as the current thread's context ClassLoader,
 * but only when it differs from the one already in place.
 *
 * @param classLoaderToUse the ClassLoader to install (may be {@code null})
 * @return the previously installed context ClassLoader when a swap happened,
 *         or {@code null} when no change was necessary
 */
public static ClassLoader overrideThreadContextClassLoader(ClassLoader classLoaderToUse) {
    Thread thread = Thread.currentThread();
    ClassLoader currentLoader = thread.getContextClassLoader();
    boolean needsSwap = classLoaderToUse != null && !classLoaderToUse.equals(currentLoader);
    if (!needsSwap) {
        return null;
    }
    thread.setContextClassLoader(classLoaderToUse);
    return currentLoader;
}

From source file:org.apereo.portal.portlet.container.EventProviderImpl.java

/**
 * Creates a portlet event for the given name and payload.
 * <p>
 * Returns {@code null} when the event is not declared as a publishing event,
 * or when JAXB marshalling of the payload fails. The payload is marshalled
 * under the portlet's classloader so its JAXB bindings resolve correctly.
 *
 * @param qname the qualified event name
 * @param value the event payload; may be {@code null}
 * @return the created event, or {@code null} (see above)
 * @throws IllegalArgumentException if the payload type does not match the
 *         event type declared in portlet.xml
 */
@Override
public Event createEvent(QName qname, Serializable value) throws IllegalArgumentException {
    if (!this.isDeclaredAsPublishingEvent(qname)) {
        return null;
    }

    if (value != null && !this.isValueInstanceOfDefinedClass(qname, value)) {
        throw new IllegalArgumentException("Payload class (" + value.getClass().getCanonicalName()
                + ") does not have the right class, check your defined event types in portlet.xml.");
    }

    if (value == null) {
        return new EventImpl(qname);
    }

    try {
        final Thread thread = Thread.currentThread();
        final ClassLoader previousLoader = thread.getContextClassLoader();
        final Writer payload = new StringWriter();
        final Class clazz = value.getClass();
        try {
            // Marshal under the portlet's classloader so JAXB sees its bindings.
            thread.setContextClassLoader(this.portletClassLoader);
            final JAXBContext context = JAXBContext.newInstance(clazz);
            final Marshaller marshaller = context.createMarshaller();
            final JAXBElement<Serializable> element = new JAXBElement<Serializable>(qname, clazz, value);
            marshaller.marshal(element, payload);
        } finally {
            thread.setContextClassLoader(previousLoader);
        }
        return new EventImpl(qname, payload.toString());
    } catch (JAXBException e) {
        // maybe there is no valid jaxb binding
        // TODO throw exception?
        logger.error("Event handling failed", e);
    } catch (FactoryConfigurationError e) {
        // TODO throw exception?
        logger.warn(e.getMessage(), e);
    }
    return null;
}

From source file:org.springframework.cloud.dataflow.app.launcher.ModuleLauncher.java

/**
 * Launches several modules inside one JVM under a single shared classloader,
 * running each module's main class on a dedicated aggregator thread.
 * <p>
 * Two classpath strategies are used. Without include/exclude arguments, the
 * classpath is the union of every module jar plus its nested archives,
 * de-duplicated by jar file name. With include/exclude arguments, the first
 * module becomes the root of a full dependency resolution and the remaining
 * modules plus any extra includes are resolved as direct dependencies.
 *
 * @param moduleLaunchRequests the modules to launch, each with its arguments
 * @param aggregateArgs        aggregate-level options; may contain
 *                             {@code EXCLUDE_DEPENDENCIES_ARG} and
 *                             {@code INCLUDE_DEPENDENCIES_ARG}
 * @throws RuntimeException wrapping any failure to resolve, load, or start
 */
public void launchAggregatedModules(List<ModuleLaunchRequest> moduleLaunchRequests,
        Map<String, String> aggregateArgs) {
    try {
        List<String> mainClassNames = new ArrayList<>();
        LinkedHashSet<URL> jarURLs = new LinkedHashSet<>();
        List<String> seenArchives = new ArrayList<>();
        final List<String[]> arguments = new ArrayList<>();
        final ClassLoader classLoader;
        if (!(aggregateArgs.containsKey(EXCLUDE_DEPENDENCIES_ARG)
                || aggregateArgs.containsKey(INCLUDE_DEPENDENCIES_ARG))) {
            // Simple strategy: union of module jars and their nested archives.
            for (ModuleLaunchRequest moduleLaunchRequest : moduleLaunchRequests) {
                Resource resource = resolveModule(moduleLaunchRequest.getModule());
                JarFileArchive jarFileArchive = new JarFileArchive(resource.getFile());
                jarURLs.add(jarFileArchive.getUrl());
                for (Archive archive : jarFileArchive.getNestedArchives(ArchiveMatchingEntryFilter.FILTER)) {
                    // avoid duplication based on unique JAR names
                    String urlAsString = archive.getUrl().toString();
                    String jarNameWithExtension = urlAsString.substring(0, urlAsString.lastIndexOf("!/"));
                    String jarNameWithoutExtension = jarNameWithExtension
                            .substring(jarNameWithExtension.lastIndexOf("/") + 1);
                    if (!seenArchives.contains(jarNameWithoutExtension)) {
                        seenArchives.add(jarNameWithoutExtension);
                        jarURLs.add(archive.getUrl());
                    }
                }
                mainClassNames.add(jarFileArchive.getMainClass());
                arguments.add(toArgArray(moduleLaunchRequest.getArguments()));
            }
            classLoader = ClassloaderUtils.createModuleClassloader(jarURLs.toArray(new URL[jarURLs.size()]));
        } else {
            // First, resolve modules and extract main classes - while slightly less efficient than just
            // doing the same processing after resolution, this ensures that module artifacts are processed
            // correctly for extracting their main class names. It is not possible in the general case to
            // identify, after resolution, whether a resource represents a module artifact which was part of the
            // original request or not. We will include the first module as root and the next as direct dependencies
            Coordinates root = null;
            ArrayList<Coordinates> includeCoordinates = new ArrayList<>();
            for (ModuleLaunchRequest moduleLaunchRequest : moduleLaunchRequests) {
                Coordinates moduleCoordinates = toCoordinates(moduleLaunchRequest.getModule());
                if (root == null) {
                    root = moduleCoordinates;
                } else {
                    includeCoordinates.add(toCoordinates(moduleLaunchRequest.getModule()));
                }
                Resource moduleResource = resolveModule(moduleLaunchRequest.getModule());
                JarFileArchive moduleArchive = new JarFileArchive(moduleResource.getFile());
                mainClassNames.add(moduleArchive.getMainClass());
                arguments.add(toArgArray(moduleLaunchRequest.getArguments()));
            }
            // Explicitly requested extra dependencies are added alongside the modules.
            for (String include : StringUtils
                    .commaDelimitedListToStringArray(aggregateArgs.get(INCLUDE_DEPENDENCIES_ARG))) {
                includeCoordinates.add(toCoordinates(include));
            }
            // Resolve all artifacts - since modules have been specified as direct dependencies, they will take
            // precedence in the resolution order, ensuring that the already resolved artifacts will be returned as
            // part of the response.
            Resource[] libraries = moduleResolver.resolve(root,
                    includeCoordinates.toArray(new Coordinates[includeCoordinates.size()]),
                    StringUtils.commaDelimitedListToStringArray(aggregateArgs.get(EXCLUDE_DEPENDENCIES_ARG)));
            for (Resource library : libraries) {
                jarURLs.add(library.getURL());
            }
            classLoader = new URLClassLoader(jarURLs.toArray(new URL[jarURLs.size()]));
        }

        // Load every module's main class through the shared classloader.
        final List<Class<?>> mainClasses = new ArrayList<>();
        for (String mainClass : mainClassNames) {
            mainClasses.add(ClassUtils.forName(mainClass, classLoader));
        }
        // Run all modules on a single aggregator thread whose context
        // classloader is the shared module classloader.
        Runnable moduleAggregatorRunner = new ModuleAggregatorRunner(classLoader, mainClasses,
                toArgArray(aggregateArgs), arguments);
        Thread moduleAggregatorRunnerThread = new Thread(moduleAggregatorRunner);
        moduleAggregatorRunnerThread.setContextClassLoader(classLoader);
        moduleAggregatorRunnerThread.setName(MODULE_AGGREGATOR_RUNNER_THREAD_NAME);
        moduleAggregatorRunnerThread.start();
    } catch (Exception e) {
        throw new RuntimeException("failed to start aggregated modules: "
                + StringUtils.collectionToCommaDelimitedString(moduleLaunchRequests), e);
    }
}

From source file:org.nuxeo.runtime.jetty.JettyComponent.java

/**
 * Starts the embedded Jetty server once the application has been deployed.
 * Does nothing when no server was configured. The server is started under
 * this component's classloader, and the caller's context classloader is
 * restored afterwards.
 *
 * @param context the component context (unused here)
 */
@Override
public void applicationStarted(ComponentContext context) {
    if (server == null) {
        return;
    }
    ctxMgr.applyLifecycleListeners();
    Thread currentThread = Thread.currentThread();
    ClassLoader previousLoader = currentThread.getContextClassLoader();
    currentThread.setContextClassLoader(getClass().getClassLoader());
    try {
        server.start();
    } catch (Exception e) { // Jetty's start() declares a bare Exception
        throw ExceptionUtils.runtimeException(e);
    } finally {
        currentThread.setContextClassLoader(getClassLoader(previousLoader));
    }
}

From source file:org.openspaces.maven.plugin.RunStandalonePUMojo.java

/**
 * Prepares and executes the PU./*from  ww  w .  ja v  a2  s. c o  m*/
 *
 * @throws MojoExecutionException
 * @throws MojoFailureException
 */
private void executePU(MavenProject project) throws MojoExecutionException, MojoFailureException {
    if (project == null || !project.getPackaging().equalsIgnoreCase("jar")) {
        throw new MojoExecutionException("The processing unit project '"
                + (project == null ? "unknown" : project.getName()) + "' must be of type jar (packaging=jar).");
    }

    // resolve the classpath for the execution of the processing unit
    List<URL> classpath;
    ClassLoader classLoader;
    try {
        String[] includeScopes = Utils.convertCommaSeparatedListToArray(scopes);
        classpath = Utils.resolveExecutionClasspath(project, includeScopes, false, reactorProjects,
                dependencyTreeBuilder, metadataSource, artifactCollector, artifactResolver, artifactFactory,
                localRepository, remoteRepositories);
        PluginLog.getLog().info("Processing unit [" + project.getName() + "] classpath: " + classpath);
        classLoader = Utils.createClassLoader(classpath, null);
    } catch (Exception e1) {
        throw new MojoExecutionException("Failed to resolve the processing unit's classpath", e1);
    }

    if (groups != null && !groups.trim().equals("")) {
        SystemInfo.singleton().lookup().setGroups(groups);
    }
    if (locators != null && !locators.trim().equals("")) {
        SystemInfo.singleton().lookup().setLocators(locators);
    }

    // run the PU
    PluginLog.getLog().info("Running processing unit: " + project.getBuild().getFinalName());

    ContainerRunnable conatinerRunnable = new ContainerRunnable(
            "org.openspaces.pu.container.standalone.StandaloneProcessingUnitContainer",
            createAttributesArray(Utils.getProcessingUnitJar((project))));
    Thread thread = new Thread(conatinerRunnable,
            "Processing Unit [" + project.getBuild().getFinalName() + "]");
    thread.setContextClassLoader(classLoader);
    thread.start();
    while (!conatinerRunnable.hasStarted()) {
        try {
            Thread.sleep(200);
        } catch (InterruptedException e) {
        }
    }
    if (conatinerRunnable.getException() != null) {
        Utils.throwMissingLicenseException(conatinerRunnable.getException(), localRepository);
        throw new MojoExecutionException(
                "Failed to start processing unit [" + project.getBuild().getFinalName() + "]",
                conatinerRunnable.getException());
    }
    containers.add(thread);
}

From source file:org.openspaces.maven.plugin.RunPUMojo.java

/**
 * Prepares and executes the PU./*from w w  w  . j  a va  2  s .c o m*/
 *
 * @throws MojoExecutionException
 * @throws MojoFailureException
 */
private void executePU(MavenProject project) throws MojoExecutionException, MojoFailureException {
    if (project == null || !project.getPackaging().equalsIgnoreCase("jar")) {
        throw new MojoExecutionException("The processing unit project '"
                + (project == null ? "unknown" : project.getName()) + "' must be of type jar (packaging=jar).");
    }

    // run the PU
    PluginLog.getLog().info("Running processing unit: " + project.getBuild().getFinalName());

    // resolve the classpath for the execution of the processing unit
    List classpath = null;
    ClassLoader classLoader = null;
    try {
        String[] includeScopes = Utils.convertCommaSeparatedListToArray(scopes);
        classpath = Utils.resolveExecutionClasspath(project, includeScopes, true, reactorProjects,
                dependencyTreeBuilder, metadataSource, artifactCollector, artifactResolver, artifactFactory,
                localRepository, remoteRepositories);
        PluginLog.getLog().info("Processing unit [" + project.getName() + "] classpath: " + classpath);
        classLoader = Utils.createClassLoader(classpath, null);
    } catch (Exception e1) {
        throw new MojoExecutionException("Failed to resolve the processing unit's  classpath", e1);
    }

    // set groups
    if (groups != null && !groups.trim().equals("")) {
        SystemInfo.singleton().lookup().setGroups(groups);
    }

    // set locators
    if (locators != null && !locators.trim().equals("")) {
        SystemInfo.singleton().lookup().setLocators(locators);
    }

    // execute the processing unit in the new class loader 
    ContainerRunnable conatinerRunnable = new ContainerRunnable(
            "org.openspaces.pu.container.integrated.IntegratedProcessingUnitContainer",
            createAttributesArray());
    Thread thread = new Thread(conatinerRunnable,
            "Processing Unit [" + project.getBuild().getFinalName() + "]");
    thread.setContextClassLoader(classLoader);
    thread.start();
    while (!conatinerRunnable.hasStarted()) {
        try {
            Thread.sleep(200);
        } catch (InterruptedException e) {
        }
    }
    if (conatinerRunnable.getException() != null) {
        Utils.throwMissingLicenseException(conatinerRunnable.getException(), localRepository);
        throw new MojoExecutionException(
                "Failed to start processing unit [" + project.getBuild().getFinalName() + "]",
                conatinerRunnable.getException());
    }
    containers.add(thread);
}

From source file:org.apache.hadoop.hbase.regionserver.RegionServerCoprocessorHost.java

/**
 * Runs the given coprocessor operation against every registered environment
 * whose instance is a RegionServerObserver.
 * <p>
 * Each observer call executes under the environment's own classloader, with
 * the caller's context classloader restored afterwards. Iteration stops early
 * when the operation signals completion.
 *
 * @param ctx the operation to execute; {@code null} is treated as a no-op
 * @return whether any observer requested that normal processing be bypassed
 * @throws IOException if an observer failure is rethrown by
 *         {@code handleCoprocessorThrowable}
 */
private boolean execOperation(final CoprocessorOperation ctx) throws IOException {
    if (ctx == null) {
        return false;
    }

    boolean bypass = false;
    for (RegionServerEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionServerObserver) {
            ctx.prepare(env);
            Thread thread = Thread.currentThread();
            ClassLoader previousLoader = thread.getContextClassLoader();
            try {
                thread.setContextClassLoader(env.getClassLoader());
                ctx.call((RegionServerObserver) env.getInstance(), ctx);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                thread.setContextClassLoader(previousLoader);
            }
            bypass |= ctx.shouldBypass();
            if (ctx.shouldComplete()) {
                break;
            }
        }
        ctx.postEnvCall(env);
    }
    return bypass;
}