List of usage examples for java.net URLClassLoader URLClassLoader
URLClassLoader(URL[] urls, ClassLoader parent)
From source file:org.pentaho.di.job.entries.hadoopjobexecutor.JobEntryHadoopJobExecutor.java
public Result execute(final Result result, int arg1) throws KettleException { result.setNrErrors(0);//from w w w . j av a 2 s. com Log4jFileAppender appender = null; String logFileName = "pdi-" + this.getName(); //$NON-NLS-1$ try { appender = LogWriter.createFileAppender(logFileName, true, false); LogWriter.getInstance().addAppender(appender); log.setLogLevel(parentJob.getLogLevel()); } catch (Exception e) { logError(BaseMessages.getString(PKG, "JobEntryHadoopJobExecutor.FailedToOpenLogFile", logFileName, //$NON-NLS-1$ e.toString())); logError(Const.getStackTracker(e)); } try { URL resolvedJarUrl = resolveJarUrl(jarUrl); if (log.isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JobEntryHadoopJobExecutor.ResolvedJar", resolvedJarUrl.toExternalForm())); } HadoopShim shim = getHadoopConfiguration().getHadoopShim(); if (isSimple) { String simpleLoggingIntervalS = environmentSubstitute(getSimpleLoggingInterval()); int simpleLogInt = 60; try { simpleLogInt = Integer.parseInt(simpleLoggingIntervalS, 10); } catch (NumberFormatException e) { logError(BaseMessages.getString(PKG, "ErrorParsingLogInterval", simpleLoggingIntervalS, simpleLogInt)); } final Class<?> mainClass = locateDriverClass(resolvedJarUrl, shim); if (log.isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JobEntryHadoopJobExecutor.UsingDriverClass", mainClass == null ? 
"null" : mainClass.getName())); logDetailed(BaseMessages.getString(PKG, "JobEntryHadoopJobExecutor.SimpleMode")); } final AtomicInteger threads = new AtomicInteger(1); final NoExitSecurityManager nesm = new NoExitSecurityManager(System.getSecurityManager()); smStack.setSecurityManager(nesm); try { Runnable r = new Runnable() { public void run() { try { try { executeMainMethod(mainClass); } finally { restoreSecurityManager(threads, nesm); } } catch (NoExitSecurityManager.NoExitSecurityException ex) { // Only log if we're blocking and waiting for this to complete if (simpleBlocking) { logExitStatus(result, mainClass, ex); } } catch (InvocationTargetException ex) { if (ex.getTargetException() instanceof NoExitSecurityManager.NoExitSecurityException) { // Only log if we're blocking and waiting for this to complete if (simpleBlocking) { logExitStatus(result, mainClass, (NoExitSecurityManager.NoExitSecurityException) ex .getTargetException()); } } else { throw new RuntimeException(ex); } } catch (Exception ex) { throw new RuntimeException(ex); } } }; Thread t = new Thread(r); t.setDaemon(true); t.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() { @Override public void uncaughtException(Thread t, Throwable e) { restoreSecurityManager(threads, nesm); if (simpleBlocking) { // Only log if we're blocking and waiting for this to complete logError(BaseMessages.getString(JobEntryHadoopJobExecutor.class, "JobEntryHadoopJobExecutor.ErrorExecutingClass", mainClass.getName()), e); result.setResult(false); } } }); nesm.addBlockedThread(t); t.start(); if (simpleBlocking) { // wait until the thread is done do { logDetailed(BaseMessages.getString(JobEntryHadoopJobExecutor.class, "JobEntryHadoopJobExecutor.Blocking", mainClass.getName())); t.join(simpleLogInt * 1000); } while (!parentJob.isStopped() && t.isAlive()); if (t.isAlive()) { // Kill thread if it's still running. The job must have been stopped. 
t.interrupt(); } } } finally { // If we're not performing simple blocking spawn a watchdog thread to restore the security manager when all // threads are complete if (!simpleBlocking) { Runnable threadWatchdog = new Runnable() { @Override public void run() { while (threads.get() > 0) { try { Thread.sleep(100); } catch (InterruptedException e) { /* ignore */ } } restoreSecurityManager(threads, nesm); } }; Thread watchdog = new Thread(threadWatchdog); watchdog.setDaemon(true); watchdog.start(); } } } else { if (log.isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JobEntryHadoopJobExecutor.AdvancedMode")); } Configuration conf = shim.createConfiguration(); FileSystem fs = shim.getFileSystem(conf); URL[] urls = new URL[] { resolvedJarUrl }; URLClassLoader loader = new URLClassLoader(urls, shim.getClass().getClassLoader()); String hadoopJobNameS = environmentSubstitute(hadoopJobName); conf.setJobName(hadoopJobNameS); String outputKeyClassS = environmentSubstitute(outputKeyClass); conf.setOutputKeyClass(loader.loadClass(outputKeyClassS)); String outputValueClassS = environmentSubstitute(outputValueClass); conf.setOutputValueClass(loader.loadClass(outputValueClassS)); if (mapperClass != null) { String mapperClassS = environmentSubstitute(mapperClass); Class<?> mapper = loader.loadClass(mapperClassS); conf.setMapperClass(mapper); } if (combinerClass != null) { String combinerClassS = environmentSubstitute(combinerClass); Class<?> combiner = loader.loadClass(combinerClassS); conf.setCombinerClass(combiner); } if (reducerClass != null) { String reducerClassS = environmentSubstitute(reducerClass); Class<?> reducer = loader.loadClass(reducerClassS); conf.setReducerClass(reducer); } if (inputFormatClass != null) { String inputFormatClassS = environmentSubstitute(inputFormatClass); Class<?> inputFormat = loader.loadClass(inputFormatClassS); conf.setInputFormat(inputFormat); } if (outputFormatClass != null) { String outputFormatClassS = 
environmentSubstitute(outputFormatClass); Class<?> outputFormat = loader.loadClass(outputFormatClassS); conf.setOutputFormat(outputFormat); } String hdfsHostnameS = environmentSubstitute(hdfsHostname); String hdfsPortS = environmentSubstitute(hdfsPort); String jobTrackerHostnameS = environmentSubstitute(jobTrackerHostname); String jobTrackerPortS = environmentSubstitute(jobTrackerPort); List<String> configMessages = new ArrayList<String>(); shim.configureConnectionInformation(hdfsHostnameS, hdfsPortS, jobTrackerHostnameS, jobTrackerPortS, conf, configMessages); for (String m : configMessages) { logBasic(m); } String inputPathS = environmentSubstitute(inputPath); String[] inputPathParts = inputPathS.split(","); List<Path> paths = new ArrayList<Path>(); for (String path : inputPathParts) { paths.add(fs.asPath(conf.getDefaultFileSystemURL(), path)); } Path[] finalPaths = paths.toArray(new Path[paths.size()]); conf.setInputPaths(finalPaths); String outputPathS = environmentSubstitute(outputPath); conf.setOutputPath(fs.asPath(conf.getDefaultFileSystemURL(), outputPathS)); // process user defined values for (UserDefinedItem item : userDefined) { if (item.getName() != null && !"".equals(item.getName()) && item.getValue() != null && !"".equals(item.getValue())) { String nameS = environmentSubstitute(item.getName()); String valueS = environmentSubstitute(item.getValue()); conf.set(nameS, valueS); } } conf.setJar(environmentSubstitute(jarUrl)); String numMapTasksS = environmentSubstitute(numMapTasks); String numReduceTasksS = environmentSubstitute(numReduceTasks); int numM = 1; try { numM = Integer.parseInt(numMapTasksS); } catch (NumberFormatException e) { logError("Can't parse number of map tasks '" + numMapTasksS + "'. Setting num" + "map tasks to 1"); } int numR = 1; try { numR = Integer.parseInt(numReduceTasksS); } catch (NumberFormatException e) { logError("Can't parse number of reduce tasks '" + numReduceTasksS + "'. 
Setting num" + "reduce tasks to 1"); } conf.setNumMapTasks(numM); conf.setNumReduceTasks(numR); RunningJob runningJob = shim.submitJob(conf); String loggingIntervalS = environmentSubstitute(getLoggingInterval()); int logIntv = 60; try { logIntv = Integer.parseInt(loggingIntervalS); } catch (NumberFormatException e) { logError(BaseMessages.getString(PKG, "ErrorParsingLogInterval", loggingIntervalS, logIntv)); } if (blocking) { try { int taskCompletionEventIndex = 0; while (!parentJob.isStopped() && !runningJob.isComplete()) { if (logIntv >= 1) { printJobStatus(runningJob); taskCompletionEventIndex = logTaskMessages(runningJob, taskCompletionEventIndex); Thread.sleep(logIntv * 1000); } else { Thread.sleep(60000); } } if (parentJob.isStopped() && !runningJob.isComplete()) { // We must stop the job running on Hadoop runningJob.killJob(); // Indicate this job entry did not complete result.setResult(false); } printJobStatus(runningJob); // Log any messages we may have missed while polling logTaskMessages(runningJob, taskCompletionEventIndex); } catch (InterruptedException ie) { logError(ie.getMessage(), ie); } // Entry is successful if the MR job is successful overall result.setResult(runningJob.isSuccessful()); } } } catch (Throwable t) { t.printStackTrace(); result.setStopped(true); result.setNrErrors(1); result.setResult(false); logError(t.getMessage(), t); } if (appender != null) { LogWriter.getInstance().removeAppender(appender); appender.close(); ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, appender.getFile(), parentJob.getJobname(), getName()); result.getResultFiles().put(resultFile.getFile().toString(), resultFile); } return result; }
From source file:com.liferay.maven.plugins.AbstractLiferayMojo.java
protected ClassLoader toClassLoader(List<String> classPath) throws Exception { List<URL> urls = new ArrayList<URL>(); for (String path : classPath) { urls.add(new URL(path)); }// www. java2s.c om return new URLClassLoader(urls.toArray(new URL[urls.size()]), null); }
From source file:com.asakusafw.runtime.util.hadoop.ConfigurationProvider.java
/**
 * Returns a class loader that can also resolve the default configuration files at
 * {@code defaultConfigPath}, caching one wrapper loader per parent loader.
 *
 * <p>If a cached wrapper for {@code current} exists and was built for the same config path it
 * is reused; otherwise a fresh {@link URLClassLoader} is created (inside a privileged action,
 * since class-loader creation is a guarded operation) and cached.
 *
 * @param current the parent class loader; must not be {@code null}
 * @param defaultConfigPath location of the default configuration, or {@code null} to skip wrapping
 * @return {@code current} itself when there is nothing to add, otherwise the (possibly cached)
 *         wrapping loader
 */
private static ClassLoader createLoader(ClassLoader current, URL defaultConfigPath) {
    assert current != null;
    if (defaultConfigPath == null) {
        return current;
    }
    ClassLoader cached = null;
    String configPath = defaultConfigPath.toExternalForm();
    synchronized (CACHE_CLASS_LOADER) {
        ClassLoaderHolder holder = CACHE_CLASS_LOADER.get(current);
        if (holder != null) {
            cached = holder.get();
            // reuse only if the weak reference is live AND it was built for the same config path
            if (cached != null && holder.configPath.equals(configPath)) {
                return cached;
            }
        }
    }
    // renamed from the original's misspelled "ehnahced"
    ClassLoader enhanced = AccessController.doPrivileged(
            (PrivilegedAction<ClassLoader>) () -> new URLClassLoader(new URL[] { defaultConfigPath }, current));
    synchronized (CACHE_CLASS_LOADER) {
        CACHE_CLASS_LOADER.put(current, new ClassLoaderHolder(enhanced, configPath));
    }
    return enhanced;
}
From source file:org.mitre.ccv.weka.mapred.ClassifyInstances.java
@Override public int run(String[] args) throws Exception { JobConf conf = new JobConf(getConf()); ArrayList<String> other_args = new ArrayList<String>(); for (int i = 0; i < args.length; ++i) { try {//from w w w . j a va 2s . c o m if ("-m".equals(args[i])) { conf.setNumMapTasks(Integer.parseInt(args[++i])); } else if ("-r".equals(args[i])) { conf.setNumReduceTasks(Integer.parseInt(args[++i])); } else if ("-D".equals(args[i])) { String[] props = args[++i].split("="); conf.set(props[0], props[1]); } else if ("-libjars".equals(args[i])) { conf.set("tmpjars", FileUtils.validateFiles(args[++i], conf)); URL[] libjars = FileUtils.getLibJars(conf); if (libjars != null && libjars.length > 0) { // Add libjars to client/tasks classpath conf.setClassLoader(new URLClassLoader(libjars, conf.getClassLoader())); // Adds libjars to our classpath Thread.currentThread().setContextClassLoader( new URLClassLoader(libjars, Thread.currentThread().getContextClassLoader())); } } else if ("-C".equals(args[i])) { conf.set(CLASSIFIER, args[++i]); } else { other_args.add(args[i]); } } catch (NumberFormatException except) { System.out.println("ERROR: Integer expected instead of " + args[i]); return printUsage(); } catch (ArrayIndexOutOfBoundsException except) { System.out.println("ERROR: Required parameter missing from " + args[i - 1]); return printUsage(); } } // Make sure there are exactly 3 parameters left. if (other_args.size() != 3) { System.out.println("ERROR: Wrong number of parameters: " + other_args.size() + " instead of 3."); return printUsage(); } return initJob(conf, other_args.get(0), other_args.get(1), other_args.get(2)); }
From source file:gdt.jgui.entity.query.JQueryPanel.java
private String[] select() { try {/*from w ww. j a v a 2s . com*/ Entigrator entigrator = console.getEntigrator(entihome$); Sack query = entigrator.getEntityAtKey(entityKey$); //String queryClass$=query.getElementItemAt("parameter", "query.class"); File queryHome = new File(entihome$ + "/" + entityKey$); URL url = queryHome.toURI().toURL(); URL[] urls = new URL[] { url }; ClassLoader parentLoader = JMainConsole.class.getClassLoader(); URLClassLoader cl = new URLClassLoader(urls, parentLoader); Class<?> cls = cl.loadClass(entityKey$); Object obj = cls.newInstance(); // Method method = obj.getClass().getDeclaredMethod("select",JMainConsole.class,String.class); Method method = obj.getClass().getDeclaredMethod("select", JMainConsole.class, String.class); Object value = method.invoke(obj, console, entihome$); String[] sa = (String[]) value; String[] ea = query.elementList("exclude"); if (ea == null) return sa; else { ArrayList<String> sl = new ArrayList<String>(); String label$; for (String s : sa) { label$ = entigrator.indx_getLabel(s); if (query.getElementItem("exclude", label$) == null) sl.add(s); } return sl.toArray(new String[0]); } } catch (Exception e) { Logger.getLogger(JQueryPanel.class.getName()).severe(e.toString()); } return null; }
From source file:org.mitre.ccv.mapred.CompleteCompositionVectorUtils.java
@Override public int run(String[] args) throws Exception { JobConf conf = new JobConf(getConf()); ArrayList<String> other_args = new ArrayList<String>(); for (int i = 0; i < args.length; ++i) { try {//from ww w. j a v a 2 s . c om if ("-m".equals(args[i])) { conf.setNumMapTasks(Integer.parseInt(args[++i])); } else if ("-r".equals(args[i])) { conf.setNumReduceTasks(Integer.parseInt(args[++i])); } else if ("-libjars".equals(args[i])) { conf.set("tmpjars", FileUtils.validateFiles(args[++i], conf)); URL[] libjars = FileUtils.getLibJars(conf); if (libjars != null && libjars.length > 0) { // Add libjars to client/tasks classpath conf.setClassLoader(new URLClassLoader(libjars, conf.getClassLoader())); // Adds libjars to our classpath Thread.currentThread().setContextClassLoader( new URLClassLoader(libjars, Thread.currentThread().getContextClassLoader())); } } else { other_args.add(args[i]); } } catch (NumberFormatException except) { System.out.println("ERROR: Integer expected instead of " + args[i]); return printUsage(); } catch (ArrayIndexOutOfBoundsException except) { System.out.println("ERROR: Required parameter missing from " + args[i - 1]); return printUsage(); } } // Make sure there are exactly 2 parameters left. if (other_args.size() < 1) { System.out.println("ERROR: Require ONE argument!"); return printUsage(); } String cmd = other_args.get(0); if (cmd.equals("featureVectors2Json")) { if (other_args.size() >= 7) { try { int start = Integer.parseInt(other_args.get(1)); int end = Integer.parseInt(other_args.get(2)); int kmers = Integer.parseInt(other_args.get(3)); featureVectors2Json(conf, start, end, kmers, other_args.get(4), other_args.get(5), other_args.get(6)); } catch (NumberFormatException except) { System.err.println("Woops. 
Error converting number!"); return -1; } } else { System.err.println("We need more arguments!"); return -1; } } else if (cmd.equals("featureVectors2rows")) { int digits = 6; if (other_args.size() > 3) { try { digits = Integer.parseInt(other_args.get(1)); featureVectors2RowMajorMatrix(conf, other_args.get(2), other_args.get(3), digits); } catch (NumberFormatException except) { System.err.println("Woops. Error converting number!"); return -1; } } else { featureVectors2RowMajorMatrix(conf, other_args.get(1), other_args.get(2), digits); } } else { System.out.println("Unknown command:" + cmd); return -1; } return 0; }
From source file:org.italiangrid.voms.container.Container.java
/**
 * Forces the taglibs jar onto the context class loader by scanning the Class-Path manifest
 * attribute of the first classpath entry. Can be disabled with the
 * {@code voms.disableTaglibsLoading} system property.
 *
 * @throws RuntimeException if no taglibs jar is found on the manifest Class-Path
 */
private void forceTaglibsLoading() {
    if (System.getProperty("voms.disableTaglibsLoading") != null) {
        log.warn("Taglibs loading disabled, as requested by voms.disableTaglibsLoading");
        return;
    }
    try {
        String classpath = java.lang.System.getProperty("java.class.path");
        String entries[] = classpath.split(System.getProperty("path.separator"));
        if (entries.length >= 1) {
            boolean taglibsFound = false;
            // try-with-resources: the original leaked the JarFile when an exception was
            // thrown before the explicit close() call.
            try (JarFile f = new JarFile(entries[0])) {
                Attributes attrs = f.getManifest().getMainAttributes();
                Name n = new Name("Class-Path");
                String jarClasspath = attrs.getValue(n);
                // A manifest without a Class-Path attribute previously caused a
                // NullPointerException here; treat it as "taglibs not found" instead.
                if (jarClasspath != null) {
                    String jarEntries[] = jarClasspath.split(" ");
                    for (String e : jarEntries) {
                        if (e.contains(TAGLIBS_JAR_NAME)) {
                            taglibsFound = true;
                            ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
                            File taglibsJar = new File(e);
                            URLClassLoader newClassLoader = new URLClassLoader(
                                    new URL[] { taglibsJar.toURI().toURL() }, currentClassLoader);
                            Thread.currentThread().setContextClassLoader(newClassLoader);
                        }
                    }
                }
            }
            if (!taglibsFound) {
                throw new RuntimeException("Error configuring taglibs classloading!");
            }
        }
    } catch (IOException e) {
        log.error(e.getMessage(), e);
        System.exit(1);
    }
}
From source file:com.flipkart.aesop.runtime.spring.RuntimeComponentContainer.java
/** * Loads the runtime context from path specified in the ServerContainerConfigInfo. Looks for file by name {@link RuntimeComponentContainer#getRuntimeConfigFileName()}. * @param serverContainerConfigInfo containing absolute path to the runtime's configuration location i.e. folder */// w ww .ja v a2 s. com private void loadRuntimeContext(ServerContainerConfigInfo serverContainerConfigInfo) { // check if a context exists already for this config path for (ServerContainerConfigInfo loadedRuntimeConfigInfo : this.runtimeConfigInfoList) { if (loadedRuntimeConfigInfo.equals(serverContainerConfigInfo)) { serverContainerConfigInfo = loadedRuntimeConfigInfo; break; } } if (serverContainerConfigInfo.getRuntimeContext() != null) { // close the context and remove from list serverContainerConfigInfo.getRuntimeContext().close(); this.runtimeConfigInfoList.remove(serverContainerConfigInfo); } ClassLoader runtimeCL = this.tccl; // check to see if the runtime and dependent binaries are deployed outside of the runtime class path. If yes, include them using a custom URL classloader. File customLibPath = new File(serverContainerConfigInfo.getXmlConfigFile().getParentFile(), ServerContainerConfigInfo.BINARIES_PATH); if (customLibPath.exists() && customLibPath.isDirectory()) { try { File[] libFiles = customLibPath.listFiles(); URL[] libURLs = new URL[libFiles.length]; for (int i = 0; i < libFiles.length; i++) { libURLs[i] = new URL(ServerContainerConfigInfo.FILE_PREFIX + libFiles[i].getAbsolutePath()); } runtimeCL = new URLClassLoader(libURLs, this.tccl); } catch (MalformedURLException e) { throw new PlatformException(e); } } // now load the runtime context and add it into the serverContainerConfigInfo list serverContainerConfigInfo.loadRuntimeContext(runtimeCL, RuntimeComponentContainer.getCommonRuntimeBeansContext()); this.runtimeConfigInfoList.add(serverContainerConfigInfo); }
From source file:com.aliyun.openservices.odps.console.utils.CommandParserUtils.java
/**
 * Builds the plugin class loader from all discovered plugin jars, chained to the current
 * context class loader, and stores it in the static {@code classLoader} field.
 */
public static void loadPlugins() {
    List<URL> pluginJarList = PluginUtil.getPluginsJarList();
    // toArray(T[]) already returns URL[]; the original's (URL[]) cast was redundant
    URL[] urls = pluginJarList.toArray(new URL[pluginJarList.size()]);
    classLoader = new URLClassLoader(urls, Thread.currentThread().getContextClassLoader());
}
From source file:org.apache.nifi.spring.SpringContextProcessor.java
/** * *//*from w ww. j a va 2 s . com*/ private static boolean isConfigResolvable(String configPath, File libDirPathFile) { List<URL> urls = new ArrayList<>(); URLClassLoader parentLoader = (URLClassLoader) SpringContextProcessor.class.getClassLoader(); urls.addAll(Arrays.asList(parentLoader.getURLs())); urls.addAll(SpringContextFactory.gatherAdditionalClassPathUrls(libDirPathFile.getAbsolutePath())); boolean resolvable = false; try (URLClassLoader throwawayCl = new URLClassLoader(urls.toArray(new URL[] {}), null)) { resolvable = throwawayCl.findResource(configPath) != null; } catch (IOException e) { // ignore since it can only happen on CL.close() } return resolvable; }