List of usage examples for java.net.URLClassLoader#loadClass(String)
public Class<?> loadClass(String name) throws ClassNotFoundException
From source file:org.fusesource.mop.MOP.java
protected void runClass(List<File> dependencies) throws Exception { URLClassLoader classLoader = MOPRepository.createFileClassLoader(null, dependencies); Thread.currentThread().setContextClassLoader(classLoader); LOG.debug("Attempting to load class: " + className); Class<?> aClass = classLoader.loadClass(className); Method method = aClass.getMethod("main", String[].class); String[] commandLineArgs = reminingArgs.toArray(new String[reminingArgs.size()]); Object[] methodArgs = { commandLineArgs }; method.invoke(null, methodArgs);//ww w.j a v a 2 s. c o m }
From source file:net.pms.external.ExternalFactory.java
/** * This method loads the jar files found in the plugin dir * or if installed from the web./*from w ww .j a va2 s . com*/ */ public static void loadJAR(URL[] jarURL, boolean download, URL newURL) { /* Create a classloader to take care of loading the plugin classes from * their URL. * * A not on the suppressed warning: The classloader need to remain open as long * as the loaded classes are in use - in our case forever. * @see http://stackoverflow.com/questions/13944868/leaving-classloader-open-after-first-use */ @SuppressWarnings("resource") URLClassLoader classLoader = new URLClassLoader(jarURL); Enumeration<URL> resources; try { // Each plugin .jar file has to contain a resource named "plugin" // which should contain the name of the main plugin class. resources = classLoader.getResources("plugin"); } catch (IOException e) { LOGGER.error("Can't load plugin resources: {}", e.getMessage()); LOGGER.trace("", e); try { classLoader.close(); } catch (IOException e2) { // Just swallow } return; } while (resources.hasMoreElements()) { URL url = resources.nextElement(); try { // Determine the plugin main class name from the contents of // the plugin file. char[] name; try (InputStreamReader in = new InputStreamReader(url.openStream())) { name = new char[512]; in.read(name); } String pluginMainClassName = new String(name).trim(); LOGGER.info("Found plugin: " + pluginMainClassName); if (download) { // Only purge code when downloading! purgeCode(pluginMainClassName, newURL); } // Try to load the class based on the main class name Class<?> clazz = classLoader.loadClass(pluginMainClassName); registerListenerClass(clazz); if (download) { downloadedListenerClasses.add(clazz); } } catch (Exception | NoClassDefFoundError e) { LOGGER.error("Error loading plugin", e); } } }
From source file:org.b3log.latke.plugin.PluginManager.java
/**
 * Loads a plugin by the specified plugin directory and put it into the
 * specified holder.
 *
 * @param pluginDir the specified plugin directory
 * @param holder the specified holder
 * @return loaded plugin, or {@code null} if the plugin declares no renderer id
 * @throws Exception exception
 */
private AbstractPlugin load(final File pluginDir, final HashMap<String, HashSet<AbstractPlugin>> holder)
        throws Exception {
    final Properties props = new Properties();
    // BUG FIX: the original passed a FileInputStream to props.load() and never
    // closed it; try-with-resources releases the file handle deterministically.
    try (FileInputStream propsIn = new FileInputStream(
            pluginDir.getPath() + File.separator + "plugin.properties")) {
        props.load(propsIn);
    }

    final File defaultClassesFileDir = new File(pluginDir.getPath() + File.separator + "classes");
    final URL defaultClassesFileDirURL = defaultClassesFileDir.toURI().toURL();

    final String webRoot = StringUtils.substringBeforeLast(AbstractServletListener.getWebRoot(), File.separator);
    final String classesFileDirPath = webRoot + props.getProperty("classesDirPath");
    final File classesFileDir = new File(classesFileDirPath);
    final URL classesFileDirURL = classesFileDir.toURI().toURL();

    // The loader is kept in classLoaders for the lifetime of the plugin, so it is
    // intentionally not closed here.
    final URLClassLoader classLoader = new URLClassLoader(
            new URL[] { defaultClassesFileDirURL, classesFileDirURL }, PluginManager.class.getClassLoader());
    classLoaders.add(classLoader);

    String pluginClassName = props.getProperty(Plugin.PLUGIN_CLASS);
    if (StringUtils.isBlank(pluginClassName)) {
        // Fall back to the no-op plugin when no class is declared.
        pluginClassName = NotInteractivePlugin.class.getName();
    }

    final String rendererId = props.getProperty(Plugin.PLUGIN_RENDERER_ID);
    if (StringUtils.isBlank(rendererId)) {
        LOGGER.log(Level.WARNING,
                "no renderer defined by this plugin[" + pluginDir.getName() + "]this plugin will be ignore!");
        return null;
    }

    final Class<?> pluginClass = classLoader.loadClass(pluginClassName);
    LOGGER.log(Level.FINEST, "Loading plugin class[name={0}]", pluginClassName);
    final AbstractPlugin ret = (AbstractPlugin) pluginClass.newInstance();

    ret.setRendererId(rendererId);
    setPluginProps(pluginDir, ret, props);
    registerEventListeners(props, classLoader, ret);
    register(ret, holder);
    ret.changeStatus();

    return ret;
}
From source file:org.ebayopensource.turmeric.tools.errorlibrary.ErrorLibraryFileGenerationTest.java
/**
 * Builds a throwaway class loader over the given directories and loads the named
 * class from it. The loader is deliberately left open: the returned class may still
 * resolve further classes from these directories lazily.
 *
 * @param classname fully qualified name of the class to load
 * @param srcDirs directories to place on the class path
 * @return the loaded class
 * @throws Exception if a directory URL is malformed or the class is not found
 */
private Class<?> loadTestProjectClass(String classname, File... srcDirs) throws Exception {
    URL[] classpath = new URL[srcDirs.length];
    for (int i = 0; i < srcDirs.length; i++) {
        classpath[i] = srcDirs[i].toURI().toURL();
    }
    URLClassLoader classloader = new URLClassLoader(classpath);
    return classloader.loadClass(classname);
}
From source file:org.interreg.docexplore.GeneralConfigPanel.java
String browseClasses(File file) { try {// ww w. j a va 2 s.c om List<Class<?>> metaDataPlugins = new LinkedList<Class<?>>(); List<Class<?>> analysisPlugins = new LinkedList<Class<?>>(); List<Class<?>> clientPlugins = new LinkedList<Class<?>>(); List<Class<?>> serverPlugins = new LinkedList<Class<?>>(); List<Class<?>> inputPlugins = new LinkedList<Class<?>>(); List<URL> urls = Startup.extractDependencies(file.getName().substring(0, file.getName().length() - 4), file.getName()); urls.add(file.toURI().toURL()); URLClassLoader loader = new URLClassLoader(urls.toArray(new URL[] {}), this.getClass().getClassLoader()); JarFile jarFile = new JarFile(file); Enumeration<JarEntry> entries = jarFile.entries(); while (entries.hasMoreElements()) { JarEntry entry = entries.nextElement(); if (!entry.getName().endsWith(".class") || entry.getName().indexOf('$') > 0) continue; String className = entry.getName().substring(0, entry.getName().length() - 6).replace('/', '.'); Class<?> clazz = null; try { clazz = loader.loadClass(className); System.out.println("Reading " + className); } catch (NoClassDefFoundError e) { System.out.println("Couldn't read " + className); } if (clazz == null) continue; if (clazz.isInterface() || Modifier.isAbstract(clazz.getModifiers())) continue; if (MetaDataPlugin.class.isAssignableFrom(clazz)) metaDataPlugins.add(clazz); if (AnalysisPlugin.class.isAssignableFrom(clazz)) analysisPlugins.add(clazz); if (ClientPlugin.class.isAssignableFrom(clazz)) clientPlugins.add(clazz); if (ServerPlugin.class.isAssignableFrom(clazz)) serverPlugins.add(clazz); if (InputPlugin.class.isAssignableFrom(clazz)) inputPlugins.add(clazz); } jarFile.close(); @SuppressWarnings("unchecked") Pair<String, String>[] classes = new Pair[metaDataPlugins.size() + analysisPlugins.size() + clientPlugins.size() + serverPlugins.size() + inputPlugins.size()]; if (classes.length == 0) throw new Exception("Invalid plugin (no entry points were found)."); int cnt = 0; for (Class<?> clazz : 
metaDataPlugins) classes[cnt++] = new Pair<String, String>(clazz.getName(), "MetaData plugin") { public String toString() { return first + " (" + second + ")"; } }; for (Class<?> clazz : analysisPlugins) classes[cnt++] = new Pair<String, String>(clazz.getName(), "Analysis plugin") { public String toString() { return first + " (" + second + ")"; } }; for (Class<?> clazz : clientPlugins) classes[cnt++] = new Pair<String, String>(clazz.getName(), "Reader client plugin") { public String toString() { return first + " (" + second + ")"; } }; for (Class<?> clazz : serverPlugins) classes[cnt++] = new Pair<String, String>(clazz.getName(), "Reader server plugin") { public String toString() { return first + " (" + second + ")"; } }; for (Class<?> clazz : inputPlugins) classes[cnt++] = new Pair<String, String>(clazz.getName(), "Reader input plugin") { public String toString() { return first + " (" + second + ")"; } }; @SuppressWarnings("unchecked") Pair<String, String> res = (Pair<String, String>) JOptionPane.showInputDialog(this, "Please select an entry point for the plugin:", "Plugin entry point", JOptionPane.QUESTION_MESSAGE, null, classes, classes[0]); if (res != null) return res.first; } catch (Throwable e) { ErrorHandler.defaultHandler.submit(e); } return null; }
From source file:com.zimbra.cs.zimlet.ZimletUtil.java
// NOTE(review): documentation-only pass — the statement order here is deliberate and
// fragile, so the code is left untouched. Observations grounded in the code below:
//  * Results are cached: sZimletHandlers maps zimlet name -> handler Class, so the
//    URLClassLoader is only created on the first lookup for a given name.
//  * The loader is closed in the finally block right after loadClass(), but
//    zh.newInstance() runs AFTER that close. If instantiation triggers lazy loading
//    of further classes from the zimlet jar it would fail — TODO confirm against
//    actual ZimletHandler implementations before relying on this.
//  * zh is a raw Class; presumably Class<? extends ZimletHandler> — verify before
//    tightening the type.
/** * Loads all the Zimlets, locates the server side ZimletHandler for each Zimlets, * loads the class and instantiate the object, then returns the instance. * * @param name of the Zimlet/*from w ww .j ava 2 s .com*/ * @return ZimletHandler object */ public static ZimletHandler getHandler(String name) { loadZimlets(); Class zh = sZimletHandlers.get(name); if (zh == null) { ZimletFile zf = sZimlets.get(name); if (zf == null) { return null; } URLClassLoader cl = null; try { String clazz = zf.getZimletDescription().getServerExtensionClass(); if (clazz != null) { URL[] urls = { zf.toURL() }; cl = new URLClassLoader(urls, ZimletUtil.class.getClassLoader()); zh = cl.loadClass(clazz); ZimbraLog.zimlet.info("Loaded class " + zh.getName()); sZimletHandlers.put(name, zh); } } catch (Exception e) { ZimbraLog.zimlet.warn("Unable to load zimlet handler for %s", name, e); return null; } finally { if (cl != null) { try { cl.close(); } catch (IOException e) { ZimbraLog.zimlet.warn("failed to close URLClassLoader", e); } } } } try { if (zh != null) { return (ZimletHandler) zh.newInstance(); } } catch (Exception e) { ZimbraLog.zimlet.warn("Unable to instantiate zimlet handler for " + name, e); } return null; }
From source file:gdt.jgui.entity.query.JQueryPanel.java
private String[] select() { try {/*from w ww .j a v a 2 s . c o m*/ Entigrator entigrator = console.getEntigrator(entihome$); Sack query = entigrator.getEntityAtKey(entityKey$); //String queryClass$=query.getElementItemAt("parameter", "query.class"); File queryHome = new File(entihome$ + "/" + entityKey$); URL url = queryHome.toURI().toURL(); URL[] urls = new URL[] { url }; ClassLoader parentLoader = JMainConsole.class.getClassLoader(); URLClassLoader cl = new URLClassLoader(urls, parentLoader); Class<?> cls = cl.loadClass(entityKey$); Object obj = cls.newInstance(); // Method method = obj.getClass().getDeclaredMethod("select",JMainConsole.class,String.class); Method method = obj.getClass().getDeclaredMethod("select", JMainConsole.class, String.class); Object value = method.invoke(obj, console, entihome$); String[] sa = (String[]) value; String[] ea = query.elementList("exclude"); if (ea == null) return sa; else { ArrayList<String> sl = new ArrayList<String>(); String label$; for (String s : sa) { label$ = entigrator.indx_getLabel(s); if (query.getElementItem("exclude", label$) == null) sl.add(s); } return sl.toArray(new String[0]); } } catch (Exception e) { Logger.getLogger(JQueryPanel.class.getName()).severe(e.toString()); } return null; }
From source file:org.entando.entando.plugins.jpcomponentinstaller.aps.system.services.installer.DefaultComponentInstaller.java
private void loadClasses(File[] jarFiles, URLClassLoader cl) throws Exception { for (File input : jarFiles) { try {//from ww w. j a v a 2s.c o m //load classes from plugin's jar files using the classloader above //loadClassesFromJar(input, cl); JarFile jarFile = new JarFile(input.getAbsolutePath()); Enumeration e = jarFile.entries(); while (e.hasMoreElements()) { JarEntry je = (JarEntry) e.nextElement(); if (je.isDirectory() || !je.getName().endsWith(".class")) { continue; } String className = je.getName().substring(0, je.getName().length() - 6); className = className.replace('/', '.'); try { cl.loadClass(className); } catch (Throwable ex) { String error = "Error loadin class: " + className; _logger.error(error); } } } catch (Throwable e) { String error = "Unexpected error loading class for file: " + input.getName() + " - " + e.getMessage(); _logger.error(error, e); throw new Exception(error, e); } } }
From source file:org.pentaho.di.job.entries.hadoopjobexecutor.JobEntryHadoopJobExecutor.java
// NOTE(review): documentation-only pass — this method mixes daemon-thread management,
// a NoExitSecurityManager swap and Hadoop job submission whose exact statement order
// matters, so the code is left byte-identical.
//
// Executes the configured Hadoop job entry. Two modes, chosen by isSimple:
//  * Simple mode: locates the driver class in the resolved jar and invokes its main()
//    on a daemon thread; NoExitSecurityManager traps System.exit() from the driver.
//    If simpleBlocking, this joins the thread in simpleLogInt-second slices until the
//    parent job stops or the thread finishes (interrupting it if the job was stopped);
//    otherwise a watchdog thread restores the security manager once all tracked
//    threads complete.
//  * Advanced mode: builds a Hadoop Configuration (job name, key/value/mapper/
//    combiner/reducer/format classes loaded via a URLClassLoader over the job jar,
//    input/output paths, user-defined properties, task counts), submits it through
//    the shim, and — if blocking — polls status every logIntv seconds, killing the
//    remote job if the parent job is stopped.
// All failures are caught at the bottom, logged, and reflected in the returned
// Result (setNrErrors(1), setResult(false)); the log file appender is always
// detached and registered as a result file.
public Result execute(final Result result, int arg1) throws KettleException { result.setNrErrors(0);//from w w w.j av a 2 s.c o m Log4jFileAppender appender = null; String logFileName = "pdi-" + this.getName(); //$NON-NLS-1$ try { appender = LogWriter.createFileAppender(logFileName, true, false); LogWriter.getInstance().addAppender(appender); log.setLogLevel(parentJob.getLogLevel()); } catch (Exception e) { logError(BaseMessages.getString(PKG, "JobEntryHadoopJobExecutor.FailedToOpenLogFile", logFileName, //$NON-NLS-1$ e.toString())); logError(Const.getStackTracker(e)); } try { URL resolvedJarUrl = resolveJarUrl(jarUrl); if (log.isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JobEntryHadoopJobExecutor.ResolvedJar", resolvedJarUrl.toExternalForm())); } HadoopShim shim = getHadoopConfiguration().getHadoopShim(); if (isSimple) { String simpleLoggingIntervalS = environmentSubstitute(getSimpleLoggingInterval()); int simpleLogInt = 60; try { simpleLogInt = Integer.parseInt(simpleLoggingIntervalS, 10); } catch (NumberFormatException e) { logError(BaseMessages.getString(PKG, "ErrorParsingLogInterval", simpleLoggingIntervalS, simpleLogInt)); } final Class<?> mainClass = locateDriverClass(resolvedJarUrl, shim); if (log.isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JobEntryHadoopJobExecutor.UsingDriverClass", mainClass == null ? 
"null" : mainClass.getName())); logDetailed(BaseMessages.getString(PKG, "JobEntryHadoopJobExecutor.SimpleMode")); } final AtomicInteger threads = new AtomicInteger(1); final NoExitSecurityManager nesm = new NoExitSecurityManager(System.getSecurityManager()); smStack.setSecurityManager(nesm); try { Runnable r = new Runnable() { public void run() { try { try { executeMainMethod(mainClass); } finally { restoreSecurityManager(threads, nesm); } } catch (NoExitSecurityManager.NoExitSecurityException ex) { // Only log if we're blocking and waiting for this to complete if (simpleBlocking) { logExitStatus(result, mainClass, ex); } } catch (InvocationTargetException ex) { if (ex.getTargetException() instanceof NoExitSecurityManager.NoExitSecurityException) { // Only log if we're blocking and waiting for this to complete if (simpleBlocking) { logExitStatus(result, mainClass, (NoExitSecurityManager.NoExitSecurityException) ex .getTargetException()); } } else { throw new RuntimeException(ex); } } catch (Exception ex) { throw new RuntimeException(ex); } } }; Thread t = new Thread(r); t.setDaemon(true); t.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() { @Override public void uncaughtException(Thread t, Throwable e) { restoreSecurityManager(threads, nesm); if (simpleBlocking) { // Only log if we're blocking and waiting for this to complete logError(BaseMessages.getString(JobEntryHadoopJobExecutor.class, "JobEntryHadoopJobExecutor.ErrorExecutingClass", mainClass.getName()), e); result.setResult(false); } } }); nesm.addBlockedThread(t); t.start(); if (simpleBlocking) { // wait until the thread is done do { logDetailed(BaseMessages.getString(JobEntryHadoopJobExecutor.class, "JobEntryHadoopJobExecutor.Blocking", mainClass.getName())); t.join(simpleLogInt * 1000); } while (!parentJob.isStopped() && t.isAlive()); if (t.isAlive()) { // Kill thread if it's still running. The job must have been stopped. 
t.interrupt(); } } } finally { // If we're not performing simple blocking spawn a watchdog thread to restore the security manager when all // threads are complete if (!simpleBlocking) { Runnable threadWatchdog = new Runnable() { @Override public void run() { while (threads.get() > 0) { try { Thread.sleep(100); } catch (InterruptedException e) { /* ignore */ } } restoreSecurityManager(threads, nesm); } }; Thread watchdog = new Thread(threadWatchdog); watchdog.setDaemon(true); watchdog.start(); } } } else { if (log.isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JobEntryHadoopJobExecutor.AdvancedMode")); } Configuration conf = shim.createConfiguration(); FileSystem fs = shim.getFileSystem(conf); URL[] urls = new URL[] { resolvedJarUrl }; URLClassLoader loader = new URLClassLoader(urls, shim.getClass().getClassLoader()); String hadoopJobNameS = environmentSubstitute(hadoopJobName); conf.setJobName(hadoopJobNameS); String outputKeyClassS = environmentSubstitute(outputKeyClass); conf.setOutputKeyClass(loader.loadClass(outputKeyClassS)); String outputValueClassS = environmentSubstitute(outputValueClass); conf.setOutputValueClass(loader.loadClass(outputValueClassS)); if (mapperClass != null) { String mapperClassS = environmentSubstitute(mapperClass); Class<?> mapper = loader.loadClass(mapperClassS); conf.setMapperClass(mapper); } if (combinerClass != null) { String combinerClassS = environmentSubstitute(combinerClass); Class<?> combiner = loader.loadClass(combinerClassS); conf.setCombinerClass(combiner); } if (reducerClass != null) { String reducerClassS = environmentSubstitute(reducerClass); Class<?> reducer = loader.loadClass(reducerClassS); conf.setReducerClass(reducer); } if (inputFormatClass != null) { String inputFormatClassS = environmentSubstitute(inputFormatClass); Class<?> inputFormat = loader.loadClass(inputFormatClassS); conf.setInputFormat(inputFormat); } if (outputFormatClass != null) { String outputFormatClassS = 
environmentSubstitute(outputFormatClass); Class<?> outputFormat = loader.loadClass(outputFormatClassS); conf.setOutputFormat(outputFormat); } String hdfsHostnameS = environmentSubstitute(hdfsHostname); String hdfsPortS = environmentSubstitute(hdfsPort); String jobTrackerHostnameS = environmentSubstitute(jobTrackerHostname); String jobTrackerPortS = environmentSubstitute(jobTrackerPort); List<String> configMessages = new ArrayList<String>(); shim.configureConnectionInformation(hdfsHostnameS, hdfsPortS, jobTrackerHostnameS, jobTrackerPortS, conf, configMessages); for (String m : configMessages) { logBasic(m); } String inputPathS = environmentSubstitute(inputPath); String[] inputPathParts = inputPathS.split(","); List<Path> paths = new ArrayList<Path>(); for (String path : inputPathParts) { paths.add(fs.asPath(conf.getDefaultFileSystemURL(), path)); } Path[] finalPaths = paths.toArray(new Path[paths.size()]); conf.setInputPaths(finalPaths); String outputPathS = environmentSubstitute(outputPath); conf.setOutputPath(fs.asPath(conf.getDefaultFileSystemURL(), outputPathS)); // process user defined values for (UserDefinedItem item : userDefined) { if (item.getName() != null && !"".equals(item.getName()) && item.getValue() != null && !"".equals(item.getValue())) { String nameS = environmentSubstitute(item.getName()); String valueS = environmentSubstitute(item.getValue()); conf.set(nameS, valueS); } } conf.setJar(environmentSubstitute(jarUrl)); String numMapTasksS = environmentSubstitute(numMapTasks); String numReduceTasksS = environmentSubstitute(numReduceTasks); int numM = 1; try { numM = Integer.parseInt(numMapTasksS); } catch (NumberFormatException e) { logError("Can't parse number of map tasks '" + numMapTasksS + "'. Setting num" + "map tasks to 1"); } int numR = 1; try { numR = Integer.parseInt(numReduceTasksS); } catch (NumberFormatException e) { logError("Can't parse number of reduce tasks '" + numReduceTasksS + "'. 
Setting num" + "reduce tasks to 1"); } conf.setNumMapTasks(numM); conf.setNumReduceTasks(numR); RunningJob runningJob = shim.submitJob(conf); String loggingIntervalS = environmentSubstitute(getLoggingInterval()); int logIntv = 60; try { logIntv = Integer.parseInt(loggingIntervalS); } catch (NumberFormatException e) { logError(BaseMessages.getString(PKG, "ErrorParsingLogInterval", loggingIntervalS, logIntv)); } if (blocking) { try { int taskCompletionEventIndex = 0; while (!parentJob.isStopped() && !runningJob.isComplete()) { if (logIntv >= 1) { printJobStatus(runningJob); taskCompletionEventIndex = logTaskMessages(runningJob, taskCompletionEventIndex); Thread.sleep(logIntv * 1000); } else { Thread.sleep(60000); } } if (parentJob.isStopped() && !runningJob.isComplete()) { // We must stop the job running on Hadoop runningJob.killJob(); // Indicate this job entry did not complete result.setResult(false); } printJobStatus(runningJob); // Log any messages we may have missed while polling logTaskMessages(runningJob, taskCompletionEventIndex); } catch (InterruptedException ie) { logError(ie.getMessage(), ie); } // Entry is successful if the MR job is successful overall result.setResult(runningJob.isSuccessful()); } } } catch (Throwable t) { t.printStackTrace(); result.setStopped(true); result.setNrErrors(1); result.setResult(false); logError(t.getMessage(), t); } if (appender != null) { LogWriter.getInstance().removeAppender(appender); appender.close(); ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, appender.getFile(), parentJob.getJobname(), getName()); result.getResultFiles().put(resultFile.getFile().toString(), resultFile); } return result; }
From source file:azkaban.webapp.AzkabanWebServer.java
private static Map<String, TriggerPlugin> loadTriggerPlugins(Context root, String pluginPath, AzkabanWebServer azkabanWebApp) { File triggerPluginPath = new File(pluginPath); if (!triggerPluginPath.exists()) { return new HashMap<String, TriggerPlugin>(); }//from www. j av a 2 s . c o m Map<String, TriggerPlugin> installedTriggerPlugins = new HashMap<String, TriggerPlugin>(); ClassLoader parentLoader = AzkabanWebServer.class.getClassLoader(); File[] pluginDirs = triggerPluginPath.listFiles(); ArrayList<String> jarPaths = new ArrayList<String>(); for (File pluginDir : pluginDirs) { if (!pluginDir.exists()) { logger.error("Error! Trigger plugin path " + pluginDir.getPath() + " doesn't exist."); continue; } if (!pluginDir.isDirectory()) { logger.error("The plugin path " + pluginDir + " is not a directory."); continue; } // Load the conf directory File propertiesDir = new File(pluginDir, "conf"); Props pluginProps = null; if (propertiesDir.exists() && propertiesDir.isDirectory()) { File propertiesFile = new File(propertiesDir, "plugin.properties"); File propertiesOverrideFile = new File(propertiesDir, "override.properties"); if (propertiesFile.exists()) { if (propertiesOverrideFile.exists()) { pluginProps = PropsUtils.loadProps(null, propertiesFile, propertiesOverrideFile); } else { pluginProps = PropsUtils.loadProps(null, propertiesFile); } } else { logger.error("Plugin conf file " + propertiesFile + " not found."); continue; } } else { logger.error("Plugin conf path " + propertiesDir + " not found."); continue; } String pluginName = pluginProps.getString("trigger.name"); List<String> extLibClasspath = pluginProps.getStringList("trigger.external.classpaths", (List<String>) null); String pluginClass = pluginProps.getString("trigger.class"); if (pluginClass == null) { logger.error("Trigger class is not set."); } else { logger.error("Plugin class " + pluginClass); } URLClassLoader urlClassLoader = null; File libDir = new File(pluginDir, "lib"); if (libDir.exists() && 
libDir.isDirectory()) { File[] files = libDir.listFiles(); ArrayList<URL> urls = new ArrayList<URL>(); for (int i = 0; i < files.length; ++i) { try { URL url = files[i].toURI().toURL(); urls.add(url); } catch (MalformedURLException e) { logger.error(e); } } if (extLibClasspath != null) { for (String extLib : extLibClasspath) { try { File file = new File(pluginDir, extLib); URL url = file.toURI().toURL(); urls.add(url); } catch (MalformedURLException e) { logger.error(e); } } } urlClassLoader = new URLClassLoader(urls.toArray(new URL[urls.size()]), parentLoader); } else { logger.error("Library path " + propertiesDir + " not found."); continue; } Class<?> triggerClass = null; try { triggerClass = urlClassLoader.loadClass(pluginClass); } catch (ClassNotFoundException e) { logger.error("Class " + pluginClass + " not found."); continue; } String source = FileIOUtils.getSourcePathFromClass(triggerClass); logger.info("Source jar " + source); jarPaths.add("jar:file:" + source); Constructor<?> constructor = null; try { constructor = triggerClass.getConstructor(String.class, Props.class, Context.class, AzkabanWebServer.class); } catch (NoSuchMethodException e) { logger.error("Constructor not found in " + pluginClass); continue; } Object obj = null; try { obj = constructor.newInstance(pluginName, pluginProps, root, azkabanWebApp); } catch (Exception e) { logger.error(e); } if (!(obj instanceof TriggerPlugin)) { logger.error("The object is not an TriggerPlugin"); continue; } TriggerPlugin plugin = (TriggerPlugin) obj; installedTriggerPlugins.put(pluginName, plugin); } // Velocity needs the jar resource paths to be set. String jarResourcePath = StringUtils.join(jarPaths, ", "); logger.info("Setting jar resource path " + jarResourcePath); VelocityEngine ve = azkabanWebApp.getVelocityEngine(); ve.addProperty("jar.resource.loader.path", jarResourcePath); return installedTriggerPlugins; }