Example usage for java.net URLClassLoader loadClass

List of usage examples for java.net URLClassLoader loadClass

Introduction

In this page you can find the example usage for java.net URLClassLoader loadClass.

Prototype

public Class<?> loadClass(String name) throws ClassNotFoundException 

Source Link

Document

Loads the class with the specified binary name.

Usage

From source file: org.springframework.ldap.itest.ad.SchemaToJavaAdITest.java

/**
 * End-to-end test of SchemaToJava against Active Directory: generates a Person class
 * from the directory schema, compiles it, loads it with a fresh URLClassLoader, and
 * verifies directory reads plus equals/hashCode sanity via reflection.
 */
@Test
public void verifySchemaToJavaOnAd() throws Exception {
    final String className = "Person";
    final String packageName = "org.springframework.ldap.odm.testclasses";

    // Copy the syntax-to-class mapping out of the classpath to a real file so it
    // can be handed to SchemaToJava as a command-line argument.
    File tempFile = File.createTempFile("test-odm-syntax-to-class-map", ".txt");
    FileUtils.copyInputStreamToFile(new ClassPathResource("/syntax-to-class-map.txt").getInputStream(),
            tempFile);

    // Add classes dir to class path - needed for compilation
    System.setProperty("java.class.path",
            System.getProperty("java.class.path") + File.pathSeparator + "target/classes");

    String[] flags = new String[] { "--url", "ldaps://127.0.0.1:" + port, "--objectclasses",
            "organizationalperson", "--syntaxmap", tempFile.getAbsolutePath(), "--class", className,
            "--package", packageName, "--outputdir", tempDir, "--username", USER_DN, "--password", PASSWORD };

    // Generate the code using SchemaToJava
    SchemaToJava.main(flags);

    tempFile.delete();

    // Java 5 - we'll use the Java 6 Compiler API once we can drop support for Java 5.
    String javaDir = calculateOutputDirectory(tempDir, packageName);

    CompilerInterface.compile(javaDir, className + ".java");
    // Java 5

    // OK it compiles so lets load our new class
    URL[] urls = new URL[] { new File(tempDir).toURI().toURL() };
    URLClassLoader ucl = new URLClassLoader(urls, getClass().getClassLoader());
    Class<?> clazz = ucl.loadClass(packageName + "." + className);

    // Create our OdmManager using our new class
    OdmManagerImpl odmManager = new OdmManagerImpl(converterManager, contextSource);
    odmManager.addManagedClass(clazz);

    // And try reading from the directory using it
    DistinguishedName testDn = new DistinguishedName("cn=William Hartnell,cn=Users");
    Object fromDirectory = odmManager.read(clazz, testDn);

    LOG.debug(String.format("Read - %1$s", fromDirectory));

    // Check some returned values
    Method getDnMethod = clazz.getMethod("getDn");
    Object dn = getDnMethod.invoke(fromDirectory);
    assertThat(dn).isEqualTo(testDn);

    // Casts to String are checked casts; no @SuppressWarnings("unchecked") needed here.
    Method getCnIteratorMethod = clazz.getMethod("getCn");
    String cn = (String) getCnIteratorMethod.invoke(fromDirectory);
    assertThat(cn).isEqualTo("William Hartnell");

    Method telephoneNumberIteratorMethod = clazz.getMethod("getTelephoneNumber");
    String telephoneNumber = (String) telephoneNumberIteratorMethod.invoke(fromDirectory);
    assertThat(telephoneNumber).isEqualTo("1");

    // Reread and check whether equals and hashCode are at least sane
    Object fromDirectory2 = odmManager.read(clazz, testDn);
    assertThat(fromDirectory2).isEqualTo(fromDirectory);
    assertThat(fromDirectory2.hashCode()).isEqualTo(fromDirectory.hashCode());
}

From source file: com.cloudera.impala.catalog.CatalogServiceCatalog.java

/**
 * Returns a list of Impala Functions, one per compatible "evaluate" method in the UDF
 * class referred to by the given Java function. This method copies the UDF Jar
 * referenced by "function" to a temporary file in "LOCAL_LIBRARY_PATH" and loads it
 * into the jvm. Then we scan all the methods in the class using reflection and extract
 * those methods and create corresponding Impala functions. Currently Impala supports
 * only "JAR" files for symbols and also a single Jar containing all the dependent
 * classes rather than a set of Jar files.
 *
 * @param db name of the database the function belongs to
 * @param function Hive metastore descriptor of the Java function
 * @return possibly-empty list of Impala functions extracted from the UDF class
 * @throws ImpalaRuntimeException if the jar cannot be copied locally or the UDF
 *         class cannot be loaded
 */
public static List<Function> extractFunctions(String db, org.apache.hadoop.hive.metastore.api.Function function)
        throws ImpalaRuntimeException {
    List<Function> result = Lists.newArrayList();
    List<String> addedSignatures = Lists.newArrayList();
    boolean compatible = true;
    StringBuilder warnMessage = new StringBuilder();
    if (function.getFunctionType() != FunctionType.JAVA) {
        compatible = false;
        warnMessage.append("Function type: " + function.getFunctionType().name() + " is not supported. Only "
                + FunctionType.JAVA.name() + " functions " + "are supported.");
    }
    if (function.getResourceUrisSize() != 1) {
        compatible = false;
        List<String> resourceUris = Lists.newArrayList();
        for (ResourceUri resource : function.getResourceUris()) {
            resourceUris.add(resource.getUri());
        }
        warnMessage.append("Impala does not support multiple Jars for dependencies." + "("
                + Joiner.on(",").join(resourceUris) + ") ");
    } else if (function.getResourceUris().get(0).getResourceType() != ResourceType.JAR) {
        // Only index into the resource list when it is known to have exactly one
        // entry; the previous unconditional get(0) threw IndexOutOfBoundsException
        // for functions registered with no resources at all.
        compatible = false;
        warnMessage.append("Function binary type: " + function.getResourceUris().get(0).getResourceType().name()
                + " is not supported. Only " + ResourceType.JAR.name() + " type is supported.");
    }
    if (!compatible) {
        LOG.warn("Skipping load of incompatible Java function: " + function.getFunctionName() + ". "
                + warnMessage.toString());
        return result;
    }
    // From here on the function is known to have exactly one JAR resource.
    String jarUri = function.getResourceUris().get(0).getUri();
    Class<?> udfClass = null;
    try {
        // Copy the jar to a unique local path so the classloader reads a stable file.
        Path localJarPath = new Path(LOCAL_LIBRARY_PATH, UUID.randomUUID().toString() + ".jar");
        if (!FileSystemUtil.copyToLocal(new Path(jarUri), localJarPath)) {
            String errorMsg = "Error loading Java function: " + db + "." + function.getFunctionName()
                    + ". Couldn't copy " + jarUri + " to local path: " + localJarPath.toString();
            LOG.error(errorMsg);
            throw new ImpalaRuntimeException(errorMsg);
        }
        URL[] classLoaderUrls = new URL[] { new URL(localJarPath.toString()) };
        URLClassLoader urlClassLoader = new URLClassLoader(classLoaderUrls);
        udfClass = urlClassLoader.loadClass(function.getClassName());
        // Check if the class is of UDF type. Currently we don't support other functions
        // TODO: Remove this once we support Java UDAF/UDTF
        if (FunctionUtils.getUDFClassType(udfClass) != FunctionUtils.UDFClassType.UDF) {
            LOG.warn("Ignoring load of incompatible Java function: " + function.getFunctionName() + " as "
                    + FunctionUtils.getUDFClassType(udfClass)
                    + " is not a supported type. Only UDFs are supported");
            return result;
        }
        // Load each method in the UDF class and create the corresponding Impala Function
        // object.
        for (Method m : udfClass.getMethods()) {
            if (!m.getName().equals("evaluate"))
                continue;
            Function fn = ScalarFunction.fromHiveFunction(db, function.getFunctionName(),
                    function.getClassName(), m.getParameterTypes(), m.getReturnType(), jarUri);
            if (fn == null) {
                LOG.warn("Ignoring incompatible method: " + m.toString() + " during load of " + "Hive UDF:"
                        + function.getFunctionName() + " from " + udfClass);
                continue;
            }
            // De-duplicate overloads that map to the same Impala signature.
            if (!addedSignatures.contains(fn.signatureString())) {
                result.add(fn);
                addedSignatures.add(fn.signatureString());
            }
        }
    } catch (ClassNotFoundException c) {
        // udfClass is always null here (loadClass failed), so report the requested
        // class name instead; also fixed the missing space before "not found".
        String errorMsg = "Error loading Java function: " + db + "." + function.getFunctionName()
                + ". Symbol class " + function.getClassName() + " not found in Jar: " + jarUri;
        LOG.error(errorMsg, c);
        throw new ImpalaRuntimeException(errorMsg, c);
    } catch (Exception e) {
        LOG.error("Skipping function load: " + function.getFunctionName(), e);
        throw new ImpalaRuntimeException("Error extracting functions", e);
    }
    return result;
}

From source file: org.pentaho.hadoop.mapreduce.test.TransMapReduceJobTestFIXME.java

/**
 * Submits a Kettle-backed wordcount MapReduce job to a live Hadoop cluster and
 * verifies the final word counts written to HDFS. Requires a running job tracker
 * and HDFS at the configured host/ports.
 */
@Test
public void submitJob() throws Exception {

    // Input and output HDFS paths for the wordcount job.
    String[] args = { "hdfs://" + hostname + ":" + hdfsPort + "/junit/wordcount/input",
            "hdfs://" + hostname + ":" + hdfsPort + "/junit/wordcount/output" };

    JobConf conf = new JobConf();
    conf.setJobName("wordcount");

    // Serialize the Kettle mapper/reducer transformations into the job config.
    KettleEnvironment.init();
    TransExecutionConfiguration transExecConfig = new TransExecutionConfiguration();
    TransMeta transMeta = new TransMeta("./test-res/wordcount-mapper.ktr");
    TransConfiguration transConfig = new TransConfiguration(transMeta, transExecConfig);
    conf.set("transformation-map-xml", transConfig.getXML());

    transMeta = new TransMeta("./test-res/wordcount-reducer.ktr");
    transConfig = new TransConfiguration(transMeta, transExecConfig);
    conf.set("transformation-reduce-xml", transConfig.getXML());

    conf.set("transformation-map-input-stepname", "Injector");
    conf.set("transformation-map-output-stepname", "Output");

    conf.set("transformation-reduce-input-stepname", "Injector");
    conf.set("transformation-reduce-output-stepname", "Output");

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);

    // Load the generic Kettle mapper/reducer classes from the plugin jar, which is
    // not on the test classpath, via a dedicated URLClassLoader.
    File jar = new File("./dist/pentaho-big-data-plugin-TRUNK-SNAPSHOT.jar");

    URLClassLoader loader = new URLClassLoader(new URL[] { jar.toURI().toURL() });

    conf.setMapperClass(
            (Class<? extends Mapper>) loader.loadClass("org.pentaho.hadoop.mapreduce.GenericTransMap"));
    conf.setCombinerClass(
            (Class<? extends Reducer>) loader.loadClass("org.pentaho.hadoop.mapreduce.GenericTransReduce"));
    conf.setReducerClass(
            (Class<? extends Reducer>) loader.loadClass("org.pentaho.hadoop.mapreduce.GenericTransReduce"));

    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    FileInputFormat.setInputPaths(conf, new Path(args[0]));
    FileOutputFormat.setOutputPath(conf, new Path(args[1]));

    conf.set("fs.default.name", "hdfs://" + hostname + ":" + hdfsPort);
    conf.set("mapred.job.tracker", hostname + ":" + trackerPort);

    // Ship the plugin jar with the job so the cluster can load the same classes.
    conf.setJar(jar.toURI().toURL().toExternalForm());
    conf.setWorkingDirectory(new Path("/tmp/wordcount"));

    JobClient jobClient = new JobClient(conf);
    ClusterStatus status = jobClient.getClusterStatus();
    assertEquals(State.RUNNING, status.getJobTrackerState());

    // Poll until the job finishes; no timeout — relies on the cluster completing.
    RunningJob runningJob = jobClient.submitJob(conf);
    System.out.print("Running " + runningJob.getJobName() + "");
    while (!runningJob.isComplete()) {
        System.out.print(".");
        Thread.sleep(500);
    }
    System.out.println();
    System.out.println("Finished " + runningJob.getJobName() + ".");

    // Verify the reducer output file against the expected tab-separated counts.
    FileObject file = fsManager.resolveFile(buildHDFSURL("/junit/wordcount/output/part-00000"));
    String output = IOUtils.toString(file.getContent().getInputStream());
    assertEquals(
            "Bye\t4\nGood\t2\nGoodbye\t1\nHadoop\t2\nHello\t5\nThis\t1\nWorld\t5\nand\t1\ncounting\t1\nextra\t1\nfor\t1\nis\t1\nsome\t1\ntext\t1\nwords\t1\n",
            output);
}

From source file: org.gradle.groovy.scripts.DefaultScriptCompilationHandler.java

/**
 * Loads a previously compiled script class from the on-disk cache directory.
 * When the cache contains the empty-script marker, an empty script class is
 * generated instead of loading anything from disk.
 */
public <T extends Script> Class<? extends T> loadFromDir(ScriptSource source, ClassLoader classLoader,
        File scriptCacheDir, Class<T> scriptBaseClass) {
    File emptyMarker = new File(scriptCacheDir, EMPTY_SCRIPT_MARKER_FILE_NAME);
    if (emptyMarker.isFile()) {
        // Nothing was compiled for this script; synthesize an empty subclass.
        return new AsmBackedEmptyScriptGenerator().generate(scriptBaseClass);
    }

    try {
        URL cacheUrl = scriptCacheDir.toURI().toURL();
        URLClassLoader cacheLoader = new URLClassLoader(WrapUtil.toArray(cacheUrl), classLoader);
        Class<?> loaded = cacheLoader.loadClass(source.getClassName());
        return loaded.asSubclass(scriptBaseClass);
    } catch (Exception e) {
        String message = String.format(
                "Could not load compiled classes for %s from cache.\n" + "*****\n"
                        + "Sometimes this error occurs when the cache was tinkered with.\n"
                        + "You may try to resolve it by deleting this folder:\n" + "%s\n" + "*****\n",
                source.getDisplayName(), scriptCacheDir.getAbsolutePath());
        throw new GradleException(message, e);
    }
}

From source file: org.springframework.ldap.odm.test.TestSchemaToJava.java

/**
 * End-to-end test of the SchemaToJava generator against a plain LDAP server:
 * generates a Person class from the schema, compiles it, loads it via a
 * URLClassLoader, and verifies directory reads and equals/hashCode sanity.
 */
@Test
public void generate() throws Exception {
    final String className = "Person";
    final String packageName = "org.springframework.ldap.odm.testclasses";

    // Copy the syntax-to-class mapping out of the classpath so it can be passed
    // to SchemaToJava as a file path argument.
    File tempFile = File.createTempFile("test-odm-syntax-to-class-map", ".txt");
    FileUtils.copyInputStreamToFile(new ClassPathResource("/syntax-to-class-map.txt").getInputStream(),
            tempFile);

    // Add classes dir to class path - needed for compilation
    System.setProperty("java.class.path",
            System.getProperty("java.class.path") + File.pathSeparator + "target/classes");

    String[] flags = new String[] { "--url", "ldap://127.0.0.1:" + port, "--objectclasses",
            "organizationalperson", "--syntaxmap", tempFile.getAbsolutePath(), "--class", className,
            "--package", packageName, "--outputdir", tempDir };

    // Generate the code using SchemaToJava
    SchemaToJava.main(flags);

    tempFile.delete();

    // Java 5 - we'll use the Java 6 Compiler API once we can drop support for Java 5.
    String javaDir = calculateOutputDirectory(tempDir, packageName);

    CompilerInterface.compile(javaDir, className + ".java");
    // Java 5

    // OK it compiles so lets load our new class
    URL[] urls = new URL[] { new File(tempDir).toURI().toURL() };
    URLClassLoader ucl = new URLClassLoader(urls, getClass().getClassLoader());
    Class<?> clazz = ucl.loadClass(packageName + "." + className);

    // Create our OdmManager using our new class
    OdmManagerImpl odmManager = new OdmManagerImpl(converterManager, contextSource);
    odmManager.addManagedClass(clazz);

    // And try reading from the directory using it
    LdapName testDn = LdapUtils.newLdapName(baseName);
    testDn.addAll(LdapUtils.newLdapName("cn=William Hartnell,ou=Doctors"));
    Object fromDirectory = odmManager.read(clazz, testDn);

    LOG.debug(String.format("Read - %1$s", fromDirectory));

    // Check some returned values
    Method getDnMethod = clazz.getMethod("getDn");
    Object dn = getDnMethod.invoke(fromDirectory);
    assertEquals(testDn, dn);

    // The generated class exposes multi-valued attributes via iterators; expect
    // exactly one cn value.
    Method getCnIteratorMethod = clazz.getMethod("getCnIterator");
    @SuppressWarnings("unchecked")
    Iterator<String> cnIterator = (Iterator<String>) getCnIteratorMethod.invoke(fromDirectory);
    int cnCount = 0;
    while (cnIterator.hasNext()) {
        cnCount++;
        assertEquals("William Hartnell", cnIterator.next());
    }
    assertEquals(1, cnCount);

    // Likewise exactly one telephone number, mapped to Integer by the syntax map.
    Method telephoneNumberIteratorMethod = clazz.getMethod("getTelephoneNumberIterator");
    @SuppressWarnings("unchecked")
    Iterator<Integer> telephoneNumberIterator = (Iterator<Integer>) telephoneNumberIteratorMethod
            .invoke(fromDirectory);
    int telephoneNumberCount = 0;
    while (telephoneNumberIterator.hasNext()) {
        telephoneNumberCount++;
        assertEquals(Integer.valueOf(1), telephoneNumberIterator.next());
    }
    assertEquals(1, telephoneNumberCount);

    // Reread and check whether equals and hashCode are at least sane
    Object fromDirectory2 = odmManager.read(clazz, testDn);
    assertEquals(fromDirectory, fromDirectory2);
    assertEquals(fromDirectory.hashCode(), fromDirectory2.hashCode());
}

From source file: org.dbflute.intro.app.logic.dfprop.TestConnectionLogic.java

/**
 * Instantiates the JDBC driver configured in the database info map, loading it
 * either from an explicitly supplied driver jar or from every jar found in the
 * engine's lib directory for the given DBFlute version.
 *
 * @param dbfluteVersion engine version whose lib directory supplies jars when no
 *        explicit jar path is given
 * @param jdbcDriverJarPath optional path to a specific driver jar
 * @param databaseInfoMap source of the driver class name
 * @return a fresh instance of the configured driver class
 */
private Driver prepareJdbcDriver(String dbfluteVersion, OptionalThing<String> jdbcDriverJarPath,
        DatabaseInfoMap databaseInfoMap)
        throws ClassNotFoundException, InstantiationException, IllegalAccessException, MalformedURLException {
    final List<URL> urls = new ArrayList<URL>();
    if (jdbcDriverJarPath.isPresent()) {
        // An explicit jar path was supplied; use it directly.
        final String jarPath = jdbcDriverJarPath.get();
        final URL fileUrl = new File(jarPath).toURI().toURL();
        urls.add(fileUrl);
    } else {
        // Otherwise pick up every jar bundled in the engine's lib directory.
        final File libDir = enginePhysicalLogic.findLibDir(dbfluteVersion);
        if (libDir.isDirectory()) {
            for (File existingJarFile : FileUtils.listFiles(libDir, FileFilterUtils.suffixFileFilter(".jar"),
                    null)) {
                try {
                    urls.add(existingJarFile.toURI().toURL());
                } catch (MalformedURLException e) { // no way
                    // Preserve the cause so the original failure is not lost.
                    throw new IllegalStateException(
                            "Failed to create the URL for the jar file: " + existingJarFile.getPath(), e);
                }
            }
        }
    }
    final URLClassLoader loader = URLClassLoader.newInstance(urls.toArray(new URL[0]));
    final String jdbcDriver = databaseInfoMap.getDriver();

    // The cast is safe only when the configured class actually implements Driver;
    // a misconfiguration surfaces as ClassCastException at the call site.
    @SuppressWarnings("unchecked")
    final Class<Driver> driverClass = (Class<Driver>) loader.loadClass(jdbcDriver);
    return driverClass.newInstance();
}

From source file: cf.janga.hook.core.SimplePluginLoader.java

/**
 * Loads the plugin class named in the plugin file's manifest from the plugin jar
 * and instantiates it.
 *
 * @param pluginFile descriptor of the plugin jar on disk
 * @return a new instance of the declared plugin class
 * @throws PluginException if the manifest lacks a plugin class, the jar path is
 *         malformed, or the class cannot be found or instantiated
 */
<T extends CoreAPI> Plugin<T> loadPluginIntoClasspath(PluginFile<T> pluginFile) throws PluginException {
    String pluginClassName = pluginFile.getPluginClass();
    if (StringUtils.isBlank(pluginClassName)) {
        throw new PluginException("The plugin class has not been provided on the manifest of the plugin file.");
    }
    try {
        URLClassLoader classLoader = URLClassLoader
                .newInstance(new URL[] { new File(pluginFile.getFilePath()).toURI().toURL() });
        // Unchecked: the manifest promises the class implements Plugin<T>; a
        // mismatch surfaces as ClassCastException when the plugin is used.
        @SuppressWarnings("unchecked")
        Class<Plugin<T>> pluginClass = (Class<Plugin<T>>) classLoader.loadClass(pluginClassName);
        return pluginClass.newInstance();
    } catch (MalformedURLException e) {
        throw new PluginException("Error loading plugin file. The plugin folder may be missing or malformed.",
                e);
    } catch (ClassNotFoundException e) {
        throw new PluginException(
                "Error loading plugin file. The plugin class cannot be found on the file provided.", e);
    } catch (InstantiationException e) {
        throw new PluginException("Error instantiating the plugin class.", e);
    } catch (IllegalAccessException e) {
        throw new PluginException("Error instantiating the plugin. Illegal access to plugin class.", e);
    }
}

From source file: me.jaimegarza.syntax.test.java.TestJavaExpandedScanner.java

/**
 * Generates a parser from the grammar, compiles it, loads it and its nested
 * StackElement class reflectively, then drives the parser token-by-token and
 * checks the running total after each step and at the end.
 */
@Test
public void test03Runtime() throws ParsingException, AnalysisException, OutputException, MalformedURLException,
        ClassNotFoundException, InstantiationException, IllegalAccessException, SecurityException,
        NoSuchMethodException, IllegalArgumentException, InvocationTargetException {
    // Emit the parser source file for the configured arguments.
    generateLanguageFile(packedArgs);

    File source = new File(tmpLanguageFile);
    File sourceDir = source.getParentFile();
    CompilationResult result = compileJavaFile(source, sourceDir);
    Assert.assertEquals(result.getErrors().length, 0, "Syntax errors found trying to execute");

    // Load the freshly compiled parser class (and its nested StackElement class,
    // named with the Outer$Inner binary-name convention) from the output dir.
    URL urls[] = new URL[1];
    urls[0] = sourceDir.toURI().toURL();
    URLClassLoader classLoader = URLClassLoader.newInstance(urls, this.getClass().getClassLoader());
    String className = FilenameUtils.getBaseName(tmpLanguageFile);
    Class<?> clazz = classLoader.loadClass(className);
    String lexicalClassName = className + "$StackElement";
    Class<?> lexicalClazz = classLoader.loadClass(lexicalClassName);
    Object parser = clazz.newInstance();
    // Look up the parser's public API reflectively since the class does not exist
    // at compile time.
    Method setVerbose = parser.getClass().getMethod("setVerbose", boolean.class);
    Method init = parser.getClass().getMethod("init");
    Method parse = parser.getClass().getMethod("parse", Integer.TYPE, lexicalClazz);
    Method getValidTokens = parser.getClass().getMethod("getValidTokens");
    Method getTotal = parser.getClass().getMethod("getTotal");
    setVerbose.invoke(parser, true);
    init.invoke(parser);
    for (Parameter p : parameters) {
        // Each expected token must be acceptable in the parser's current state.
        int[] tokens = (int[]) getValidTokens.invoke(parser);
        Assert.assertTrue(arrayContains(tokens, p.token), "Token " + p.token + " ain't there");
        Object lexicalValue = lexicalClazz.newInstance();
        Method setNumber = lexicalClazz.getMethod("setNumber", Integer.TYPE);
        setNumber.invoke(lexicalValue, p.value);
        parse.invoke(parser, p.token, lexicalValue);
        Object t = getTotal.invoke(parser);
        Assert.assertEquals(((Integer) t).intValue(), p.result, "Result is not " + p.result);
    }
    // Final accumulated total expected after the full token sequence.
    Object o = getTotal.invoke(parser);
    Assert.assertTrue(o instanceof Integer);
    Integer i = (Integer) o;
    Assert.assertEquals((int) i, -17, "total does not match");
}

From source file: de.knurt.fam.plugin.DefaultPluginResolver.java

/**
 * Scans the configured plugin directory for jar files, loads every contained
 * class, registers those implementing {@code Plugin}, wires up the (at most one)
 * {@code RegisterSubmission} plugin, and starts all plugins. Falls back to
 * {@code DefaultRegisterSubmission} when no plugin provides one.
 */
private void initPlugins() {
    File pluginDirectory = new File(FamConnector.me().getPluginDirectory());
    if (pluginDirectory.exists() && pluginDirectory.isDirectory() && pluginDirectory.canRead()) {
        File[] files = pluginDirectory.listFiles();
        ClassLoader currentThreadClassLoader = Thread.currentThread().getContextClassLoader();
        for (File file : files) {
            if (file.isFile() && file.getName().toLowerCase().endsWith("jar")) {
                JarFile jar = null;
                try {
                    jar = new JarFile(file.getAbsoluteFile().toString());
                    Enumeration<JarEntry> jarEntries = jar.entries();
                    while (jarEntries.hasMoreElements()) {
                        JarEntry entry = jarEntries.nextElement();
                        if (entry.getName().toLowerCase().endsWith("class")) {
                            // Convert the jar entry path to a binary class name.
                            String className = entry.getName().replaceAll("/", ".").replaceAll("\\.class$", "");
                            // classLoader must not be closed, getting an "IllegalStateException: zip file closed" otherwise
                            URLClassLoader classLoader = new URLClassLoader(new URL[] { file.toURI().toURL() },
                                    currentThreadClassLoader);
                            Class<?> cl = classLoader.loadClass(className);
                            if (this.isPlugin(cl)) {
                                Plugin plugin = (Plugin) cl.newInstance();
                                this.plugins.add(plugin);
                            }
                        }
                    }
                } catch (IllegalAccessException e) {
                    e.printStackTrace();
                    FamLog.logException(this.getClass(), e, "failed to load plugin", 201010091426l);
                } catch (InstantiationException e) {
                    e.printStackTrace();
                    FamLog.logException(this.getClass(), e, "failed to load plugin", 201010091424l);
                } catch (ClassNotFoundException e) {
                    e.printStackTrace();
                    FamLog.logException(this.getClass(), e, "failed to load plugin", 201010091425l);
                } catch (IOException e) {
                    e.printStackTrace();
                    FamLog.logException(this.getClass(), e, "failed to load plugin", 201010091351l);
                } finally {
                    // Best-effort close; jar may be null if construction failed.
                    try {
                        jar.close();
                    } catch (Exception e) {
                    }
                }
            }
        }
        // At most one plugin may provide the RegisterSubmission implementation.
        // FIX: "found" must be declared outside the loop; previously it was reset
        // on every iteration, so the duplicate check below could never trigger.
        boolean found = false;
        for (Plugin plugin : this.plugins) {
            if (this.implementz(plugin.getClass(), RegisterSubmission.class)) {
                if (found) {
                    throw new PluginConfigurationException("Found more than one RegisterSubmission classes");
                    // TODO #19 supply a solution Ticket
                }
                this.registerSubmission = (RegisterSubmission) plugin;
                found = true;
            }
        }
        for (Plugin plugin : this.plugins) {
            plugin.start();
        }
    }
    // search plugin
    if (this.registerSubmission == null) {
        this.registerSubmission = new DefaultRegisterSubmission();
    }
}

From source file: org.apache.impala.catalog.CatalogServiceCatalog.java

/**
 * Returns a list of Impala Functions, one per compatible "evaluate" method in the UDF
 * class referred to by the given Java function. This method copies the UDF Jar
 * referenced by "function" to a temporary file in localLibraryPath_ and loads it
 * into the jvm. Then we scan all the methods in the class using reflection and extract
 * those methods and create corresponding Impala functions. Currently Impala supports
 * only "JAR" files for symbols and also a single Jar containing all the dependent
 * classes rather than a set of Jar files.
 *
 * @param db name of the database the function belongs to
 * @param function Hive metastore descriptor of the Java function
 * @return possibly-empty list of Impala functions extracted from the UDF class
 * @throws ImpalaRuntimeException if the jar cannot be copied locally or the UDF
 *         class cannot be loaded or linked
 */
public static List<Function> extractFunctions(String db, org.apache.hadoop.hive.metastore.api.Function function)
        throws ImpalaRuntimeException {
    List<Function> result = Lists.newArrayList();
    List<String> addedSignatures = Lists.newArrayList();
    StringBuilder warnMessage = new StringBuilder();
    if (!isFunctionCompatible(function, warnMessage)) {
        LOG.warn("Skipping load of incompatible function: " + function.getFunctionName() + ". "
                + warnMessage.toString());
        return result;
    }
    String jarUri = function.getResourceUris().get(0).getUri();
    Class<?> udfClass = null;
    Path localJarPath = null;
    try {
        // Copy the jar to a unique local path so the classloader reads a stable file.
        localJarPath = new Path(localLibraryPath_, UUID.randomUUID().toString() + ".jar");
        try {
            FileSystemUtil.copyToLocal(new Path(jarUri), localJarPath);
        } catch (IOException e) {
            String errorMsg = "Error loading Java function: " + db + "." + function.getFunctionName()
                    + ". Couldn't copy " + jarUri + " to local path: " + localJarPath.toString();
            LOG.error(errorMsg, e);
            throw new ImpalaRuntimeException(errorMsg);
        }
        URL[] classLoaderUrls = new URL[] { new URL(localJarPath.toString()) };
        URLClassLoader urlClassLoader = new URLClassLoader(classLoaderUrls);
        udfClass = urlClassLoader.loadClass(function.getClassName());
        // Check if the class is of UDF type. Currently we don't support other functions
        // TODO: Remove this once we support Java UDAF/UDTF
        if (FunctionUtils.getUDFClassType(udfClass) != FunctionUtils.UDFClassType.UDF) {
            LOG.warn("Ignoring load of incompatible Java function: " + function.getFunctionName() + " as "
                    + FunctionUtils.getUDFClassType(udfClass)
                    + " is not a supported type. Only UDFs are supported");
            return result;
        }
        // Load each method in the UDF class and create the corresponding Impala Function
        // object.
        for (Method m : udfClass.getMethods()) {
            if (!m.getName().equals(UdfExecutor.UDF_FUNCTION_NAME))
                continue;
            Function fn = ScalarFunction.fromHiveFunction(db, function.getFunctionName(),
                    function.getClassName(), m.getParameterTypes(), m.getReturnType(), jarUri);
            if (fn == null) {
                LOG.warn("Ignoring incompatible method: " + m.toString() + " during load of " + "Hive UDF:"
                        + function.getFunctionName() + " from " + udfClass);
                continue;
            }
            // De-duplicate overloads that map to the same Impala signature.
            if (!addedSignatures.contains(fn.signatureString())) {
                result.add(fn);
                addedSignatures.add(fn.signatureString());
            }
        }
    } catch (ClassNotFoundException c) {
        // udfClass is always null here (loadClass failed), so report the requested
        // class name instead; also fixed the missing space before "not found".
        String errorMsg = "Error loading Java function: " + db + "." + function.getFunctionName()
                + ". Symbol class " + function.getClassName() + " not found in Jar: " + jarUri;
        LOG.error(errorMsg, c);
        throw new ImpalaRuntimeException(errorMsg, c);
    } catch (Exception e) {
        LOG.error("Skipping function load: " + function.getFunctionName(), e);
        throw new ImpalaRuntimeException("Error extracting functions", e);
    } catch (LinkageError e) {
        String errorMsg = "Error resolving dependencies for Java function: " + db + "."
                + function.getFunctionName();
        LOG.error(errorMsg, e);
        throw new ImpalaRuntimeException(errorMsg, e);
    } finally {
        // Always clean up the temporary local copy of the jar.
        if (localJarPath != null)
            FileSystemUtil.deleteIfExists(localJarPath);
    }
    return result;
}