List of usage examples for the java.net.URLClassLoader constructor
public URLClassLoader(URL[] urls)
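This constructor builds a loader that searches the given URLs after first delegating to the default parent, the system class loader. Before the collected examples, a minimal self-contained sketch; the jar path and class name are hypothetical:

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;

public class BasicUsage {
    public static void main(String[] args) throws Exception {
        // Hypothetical jar and class name, purely for illustration.
        URL jarUrl = new File("/tmp/plugin.jar").toURI().toURL();
        // URLClassLoader implements Closeable since Java 7, so a short-lived
        // loader can be scoped with try-with-resources.
        try (URLClassLoader loader = new URLClassLoader(new URL[] { jarUrl })) {
            Class<?> cls = loader.loadClass("com.example.Plugin");
            Object instance = cls.getDeclaredConstructor().newInstance();
            System.out.println("Loaded " + instance.getClass().getName());
        }
    }
}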
From source file: it.crs4.pydoop.pipes.Submitter.java
@Override
public int run(String[] args) throws Exception {
    CommandLineParser cli = new CommandLineParser();
    if (args.length == 0) {
        cli.printUsage();
        return 1;
    }
    cli.addOption("input", false, "input path to the maps", "path");
    cli.addOption("output", false, "output path from the reduces", "path");
    cli.addOption("jar", false, "job jar file", "path");
    cli.addOption("inputformat", false, "java classname of InputFormat", "class");
    //cli.addArgument("javareader", false, "is the RecordReader in Java");
    cli.addOption("map", false, "java classname of Mapper", "class");
    cli.addOption("partitioner", false, "java classname of Partitioner", "class");
    cli.addOption("reduce", false, "java classname of Reducer", "class");
    cli.addOption("writer", false, "java classname of OutputFormat", "class");
    cli.addOption("program", false, "URI to application executable", "class");
    cli.addOption("reduces", false, "number of reduces", "num");
    cli.addOption("jobconf", false,
        "\"n1=v1,n2=v2,..\" (Deprecated) Optional. Add or override a JobConf property.", "key=val");
    cli.addOption("lazyOutput", false, "Optional. Create output lazily", "boolean");
    Parser parser = cli.createParser();
    try {
        GenericOptionsParser genericParser = new GenericOptionsParser(getConf(), args);
        CommandLine results = parser.parse(cli.options, genericParser.getRemainingArgs());
        JobConf job = new JobConf(getConf());
        if (results.hasOption("input")) {
            FileInputFormat.setInputPaths(job, results.getOptionValue("input"));
        }
        if (results.hasOption("output")) {
            FileOutputFormat.setOutputPath(job, new Path(results.getOptionValue("output")));
        }
        if (results.hasOption("jar")) {
            job.setJar(results.getOptionValue("jar"));
        }
        if (results.hasOption("inputformat")) {
            setIsJavaRecordReader(job, true);
            job.setInputFormat(getClass(results, "inputformat", job, InputFormat.class));
        }
        if (results.hasOption("javareader")) {
            setIsJavaRecordReader(job, true);
        }
        if (results.hasOption("map")) {
            setIsJavaMapper(job, true);
            job.setMapperClass(getClass(results, "map", job, Mapper.class));
        }
        if (results.hasOption("partitioner")) {
            job.setPartitionerClass(getClass(results, "partitioner", job, Partitioner.class));
        }
        if (results.hasOption("reduce")) {
            setIsJavaReducer(job, true);
            job.setReducerClass(getClass(results, "reduce", job, Reducer.class));
        }
        if (results.hasOption("reduces")) {
            job.setNumReduceTasks(Integer.parseInt(results.getOptionValue("reduces")));
        }
        if (results.hasOption("writer")) {
            setIsJavaRecordWriter(job, true);
            job.setOutputFormat(getClass(results, "writer", job, OutputFormat.class));
        }
        if (results.hasOption("lazyOutput")) {
            if (Boolean.parseBoolean(results.getOptionValue("lazyOutput"))) {
                LazyOutputFormat.setOutputFormatClass(job, job.getOutputFormat().getClass());
            }
        }
        if (results.hasOption("program")) {
            setExecutable(job, results.getOptionValue("program"));
        }
        if (results.hasOption("jobconf")) {
            LOG.warn("-jobconf option is deprecated, please use -D instead.");
            String options = results.getOptionValue("jobconf");
            StringTokenizer tokenizer = new StringTokenizer(options, ",");
            while (tokenizer.hasMoreTokens()) {
                String keyVal = tokenizer.nextToken().trim();
                String[] keyValSplit = keyVal.split("=");
                job.set(keyValSplit[0], keyValSplit[1]);
            }
        }
        // if they gave us a jar file, include it into the class path
        String jarFile = job.getJar();
        if (jarFile != null) {
            final URL[] urls = new URL[] { FileSystem.getLocal(job).pathToFile(new Path(jarFile)).toURL() };
            // FindBugs complains that creating a URLClassLoader should be
            // in a doPrivileged() block.
            ClassLoader loader = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
                public ClassLoader run() {
                    return new URLClassLoader(urls);
                }
            });
            job.setClassLoader(loader);
        }
        runJob(job);
        return 0;
    } catch (ParseException pe) {
        LOG.info("Error : " + pe);
        cli.printUsage();
        return 1;
    }
}
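The doPrivileged() wrapper above is only there to satisfy FindBugs; on Java 8+ the same idiom can be written with a lambda. A minimal sketch under that assumption (the urls array is taken as prepared by the caller; note that AccessController has been deprecated for removal in recent JDKs):

import java.net.URL;
import java.net.URLClassLoader;
import java.security.AccessController;
import java.security.PrivilegedAction;

public class PrivilegedLoader {
    // Sketch: the same FindBugs-friendly idiom, written with a Java 8 lambda.
    // 'urls' is assumed to be built by the caller, as in the example above.
    static ClassLoader newLoader(final URL[] urls) {
        return AccessController.doPrivileged(
            (PrivilegedAction<ClassLoader>) () -> new URLClassLoader(urls));
    }
}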
From source file: com.cloudera.impala.catalog.CatalogServiceCatalog.java
/**
 * Returns a list of Impala Functions, one per compatible "evaluate" method in the UDF
 * class referred to by the given Java function. This method copies the UDF Jar
 * referenced by "function" to a temporary file in "LOCAL_LIBRARY_PATH" and loads it
 * into the jvm. Then we scan all the methods in the class using reflection and extract
 * those methods and create corresponding Impala functions. Currently Impala supports
 * only "JAR" files for symbols and also a single Jar containing all the dependent
 * classes rather than a set of Jar files.
 */
public static List<Function> extractFunctions(String db,
        org.apache.hadoop.hive.metastore.api.Function function) throws ImpalaRuntimeException {
    List<Function> result = Lists.newArrayList();
    List<String> addedSignatures = Lists.newArrayList();
    boolean compatible = true;
    StringBuilder warnMessage = new StringBuilder();
    if (function.getFunctionType() != FunctionType.JAVA) {
        compatible = false;
        warnMessage.append("Function type: " + function.getFunctionType().name()
            + " is not supported. Only " + FunctionType.JAVA.name() + " functions "
            + "are supported.");
    }
    if (function.getResourceUrisSize() != 1) {
        compatible = false;
        List<String> resourceUris = Lists.newArrayList();
        for (ResourceUri resource : function.getResourceUris()) {
            resourceUris.add(resource.getUri());
        }
        warnMessage.append("Impala does not support multiple Jars for dependencies."
            + "(" + Joiner.on(",").join(resourceUris) + ") ");
    }
    if (function.getResourceUris().get(0).getResourceType() != ResourceType.JAR) {
        compatible = false;
        warnMessage.append("Function binary type: "
            + function.getResourceUris().get(0).getResourceType().name()
            + " is not supported. Only " + ResourceType.JAR.name() + " type is supported.");
    }
    if (!compatible) {
        LOG.warn("Skipping load of incompatible Java function: " + function.getFunctionName()
            + ". " + warnMessage.toString());
        return result;
    }
    String jarUri = function.getResourceUris().get(0).getUri();
    Class<?> udfClass = null;
    try {
        Path localJarPath = new Path(LOCAL_LIBRARY_PATH, UUID.randomUUID().toString() + ".jar");
        if (!FileSystemUtil.copyToLocal(new Path(jarUri), localJarPath)) {
            String errorMsg = "Error loading Java function: " + db + "." + function.getFunctionName()
                + ". Couldn't copy " + jarUri + " to local path: " + localJarPath.toString();
            LOG.error(errorMsg);
            throw new ImpalaRuntimeException(errorMsg);
        }
        URL[] classLoaderUrls = new URL[] { new URL(localJarPath.toString()) };
        URLClassLoader urlClassLoader = new URLClassLoader(classLoaderUrls);
        udfClass = urlClassLoader.loadClass(function.getClassName());
        // Check if the class is of UDF type. Currently we don't support other functions
        // TODO: Remove this once we support Java UDAF/UDTF
        if (FunctionUtils.getUDFClassType(udfClass) != FunctionUtils.UDFClassType.UDF) {
            LOG.warn("Ignoring load of incompatible Java function: " + function.getFunctionName()
                + " as " + FunctionUtils.getUDFClassType(udfClass)
                + " is not a supported type. Only UDFs are supported");
            return result;
        }
        // Load each method in the UDF class and create the corresponding Impala Function
        // object.
        for (Method m : udfClass.getMethods()) {
            if (!m.getName().equals("evaluate"))
                continue;
            Function fn = ScalarFunction.fromHiveFunction(db, function.getFunctionName(),
                function.getClassName(), m.getParameterTypes(), m.getReturnType(), jarUri);
            if (fn == null) {
                LOG.warn("Ignoring incompatible method: " + m.toString() + " during load of "
                    + "Hive UDF:" + function.getFunctionName() + " from " + udfClass);
                continue;
            }
            if (!addedSignatures.contains(fn.signatureString())) {
                result.add(fn);
                addedSignatures.add(fn.signatureString());
            }
        }
    } catch (ClassNotFoundException c) {
        String errorMsg = "Error loading Java function: " + db + "." + function.getFunctionName()
            + ". Symbol class " + udfClass + " not found in Jar: " + jarUri;
        LOG.error(errorMsg);
        throw new ImpalaRuntimeException(errorMsg, c);
    } catch (Exception e) {
        LOG.error("Skipping function load: " + function.getFunctionName(), e);
        throw new ImpalaRuntimeException("Error extracting functions", e);
    }
    return result;
}
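A caveat worth noting in the example above: new URL(localJarPath.toString()) only succeeds when the Hadoop Path string already carries a scheme such as file:. For a plain local file, File.toURI().toURL() is the safe conversion because it percent-encodes spaces and other special characters. A minimal sketch with a hypothetical path:

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;

public class JarUrlExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical jar path containing a space, purely for illustration.
        File localJar = new File("/tmp/udf libs/my-udf.jar");
        URL jarUrl = localJar.toURI().toURL(); // file:/tmp/udf%20libs/my-udf.jar
        URLClassLoader loader = new URLClassLoader(new URL[] { jarUrl });
        System.out.println(java.util.Arrays.toString(loader.getURLs()));
    }
}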
From source file: it.crs4.pydoop.mapreduce.pipes.CommandLineParser.java
public int run(String[] args) throws Exception {
    CommandLineParser cli = new CommandLineParser();
    if (args.length == 0) {
        cli.printUsage();
        return 1;
    }
    try {
        Job job = new Job(new Configuration());
        job.setJobName(getClass().getName());
        Configuration conf = job.getConfiguration();
        CommandLine results = cli.parse(conf, args);
        if (results.hasOption("input")) {
            Path path = new Path(results.getOptionValue("input"));
            FileInputFormat.setInputPaths(job, path);
        }
        if (results.hasOption("output")) {
            Path path = new Path(results.getOptionValue("output"));
            FileOutputFormat.setOutputPath(job, path);
        }
        if (results.hasOption("jar")) {
            job.setJar(results.getOptionValue("jar"));
        }
        if (results.hasOption("inputformat")) {
            explicitInputFormat = true;
            setIsJavaRecordReader(conf, true);
            job.setInputFormatClass(getClass(results, "inputformat", conf, InputFormat.class));
        }
        if (results.hasOption("javareader")) {
            setIsJavaRecordReader(conf, true);
        }
        if (results.hasOption("map")) {
            setIsJavaMapper(conf, true);
            job.setMapperClass(getClass(results, "map", conf, Mapper.class));
        }
        if (results.hasOption("partitioner")) {
            job.setPartitionerClass(getClass(results, "partitioner", conf, Partitioner.class));
        }
        if (results.hasOption("reduce")) {
            setIsJavaReducer(conf, true);
            job.setReducerClass(getClass(results, "reduce", conf, Reducer.class));
        }
        if (results.hasOption("reduces")) {
            job.setNumReduceTasks(Integer.parseInt(results.getOptionValue("reduces")));
        }
        if (results.hasOption("writer")) {
            explicitOutputFormat = true;
            setIsJavaRecordWriter(conf, true);
            job.setOutputFormatClass(getClass(results, "writer", conf, OutputFormat.class));
        }
        if (results.hasOption("lazyOutput")) {
            if (Boolean.parseBoolean(results.getOptionValue("lazyOutput"))) {
                LazyOutputFormat.setOutputFormatClass(job, job.getOutputFormatClass());
            }
        }
        if (results.hasOption("avroInput")) {
            avroInput = AvroIO.valueOf(results.getOptionValue("avroInput").toUpperCase());
        }
        if (results.hasOption("avroOutput")) {
            avroOutput = AvroIO.valueOf(results.getOptionValue("avroOutput").toUpperCase());
        }
        if (results.hasOption("program")) {
            setExecutable(conf, results.getOptionValue("program"));
        }
        // if they gave us a jar file, include it into the class path
        String jarFile = job.getJar();
        if (jarFile != null) {
            final URL[] urls = new URL[] { FileSystem.getLocal(conf).pathToFile(new Path(jarFile)).toURL() };
            // FindBugs complains that creating a URLClassLoader should be
            // in a doPrivileged() block.
            ClassLoader loader = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
                public ClassLoader run() {
                    return new URLClassLoader(urls);
                }
            });
            conf.setClassLoader(loader);
        }
        setupPipesJob(job);
        return job.waitForCompletion(true) ? 0 : 1;
    } catch (ParseException pe) {
        LOG.info("Error : " + pe);
        cli.printUsage();
        return 1;
    }
}
From source file: io.fabric8.forge.camel.commands.project.AbstractCamelProjectCommand.java
public static Properties loadComponentProperties(Dependency dependency) {
    Properties answer = new Properties();
    try {
        // is it a JAR file
        File file = dependency.getArtifact().getUnderlyingResourceObject();
        if (file != null && file.getName().toLowerCase().endsWith(".jar")) {
            URL url = new URL("file:" + file.getAbsolutePath());
            URLClassLoader child = new URLClassLoader(new URL[] { url });
            InputStream is = child.getResourceAsStream("META-INF/services/org/apache/camel/component.properties");
            if (is != null) {
                answer.load(is);
            }
        }
    } catch (Throwable e) {
        // ignore
    }
    return answer;
}
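The single-argument constructor used above delegates to the system class loader before searching the jar, so getResourceAsStream() can be satisfied from the application's own classpath instead of the dependency jar. When the lookup must be confined to the jar, the two-argument constructor accepts an explicit null parent. A sketch under that assumption (the jar path is hypothetical):

import java.io.File;
import java.io.InputStream;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Properties;

public class IsolatedResourceLookup {
    public static void main(String[] args) throws Exception {
        URL jar = new File("/tmp/camel-ftp.jar").toURI().toURL(); // hypothetical jar
        // null parent: only the bootstrap loader and the given URLs are
        // searched, never the application classpath.
        try (URLClassLoader isolated = new URLClassLoader(new URL[] { jar }, null)) {
            InputStream is = isolated.getResourceAsStream(
                "META-INF/services/org/apache/camel/component.properties");
            if (is != null) {
                Properties props = new Properties();
                props.load(is);
                is.close();
                System.out.println(props);
            }
        }
    }
}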
From source file: org.apache.impala.catalog.CatalogServiceCatalog.java
/**
 * Returns a list of Impala Functions, one per compatible "evaluate" method in the UDF
 * class referred to by the given Java function. This method copies the UDF Jar
 * referenced by "function" to a temporary file in localLibraryPath_ and loads it
 * into the jvm. Then we scan all the methods in the class using reflection and extract
 * those methods and create corresponding Impala functions. Currently Impala supports
 * only "JAR" files for symbols and also a single Jar containing all the dependent
 * classes rather than a set of Jar files.
 */
public static List<Function> extractFunctions(String db,
        org.apache.hadoop.hive.metastore.api.Function function) throws ImpalaRuntimeException {
    List<Function> result = Lists.newArrayList();
    List<String> addedSignatures = Lists.newArrayList();
    StringBuilder warnMessage = new StringBuilder();
    if (!isFunctionCompatible(function, warnMessage)) {
        LOG.warn("Skipping load of incompatible function: " + function.getFunctionName()
            + ". " + warnMessage.toString());
        return result;
    }
    String jarUri = function.getResourceUris().get(0).getUri();
    Class<?> udfClass = null;
    Path localJarPath = null;
    try {
        localJarPath = new Path(localLibraryPath_, UUID.randomUUID().toString() + ".jar");
        try {
            FileSystemUtil.copyToLocal(new Path(jarUri), localJarPath);
        } catch (IOException e) {
            String errorMsg = "Error loading Java function: " + db + "." + function.getFunctionName()
                + ". Couldn't copy " + jarUri + " to local path: " + localJarPath.toString();
            LOG.error(errorMsg, e);
            throw new ImpalaRuntimeException(errorMsg);
        }
        URL[] classLoaderUrls = new URL[] { new URL(localJarPath.toString()) };
        URLClassLoader urlClassLoader = new URLClassLoader(classLoaderUrls);
        udfClass = urlClassLoader.loadClass(function.getClassName());
        // Check if the class is of UDF type. Currently we don't support other functions
        // TODO: Remove this once we support Java UDAF/UDTF
        if (FunctionUtils.getUDFClassType(udfClass) != FunctionUtils.UDFClassType.UDF) {
            LOG.warn("Ignoring load of incompatible Java function: " + function.getFunctionName()
                + " as " + FunctionUtils.getUDFClassType(udfClass)
                + " is not a supported type. Only UDFs are supported");
            return result;
        }
        // Load each method in the UDF class and create the corresponding Impala Function
        // object.
        for (Method m : udfClass.getMethods()) {
            if (!m.getName().equals(UdfExecutor.UDF_FUNCTION_NAME))
                continue;
            Function fn = ScalarFunction.fromHiveFunction(db, function.getFunctionName(),
                function.getClassName(), m.getParameterTypes(), m.getReturnType(), jarUri);
            if (fn == null) {
                LOG.warn("Ignoring incompatible method: " + m.toString() + " during load of "
                    + "Hive UDF:" + function.getFunctionName() + " from " + udfClass);
                continue;
            }
            if (!addedSignatures.contains(fn.signatureString())) {
                result.add(fn);
                addedSignatures.add(fn.signatureString());
            }
        }
    } catch (ClassNotFoundException c) {
        String errorMsg = "Error loading Java function: " + db + "." + function.getFunctionName()
            + ". Symbol class " + udfClass + " not found in Jar: " + jarUri;
        LOG.error(errorMsg);
        throw new ImpalaRuntimeException(errorMsg, c);
    } catch (Exception e) {
        LOG.error("Skipping function load: " + function.getFunctionName(), e);
        throw new ImpalaRuntimeException("Error extracting functions", e);
    } catch (LinkageError e) {
        String errorMsg = "Error resolving dependencies for Java function: " + db + "."
            + function.getFunctionName();
        LOG.error(errorMsg);
        throw new ImpalaRuntimeException(errorMsg, e);
    } finally {
        if (localJarPath != null) FileSystemUtil.deleteIfExists(localJarPath);
    }
    return result;
}
From source file: com.aurel.track.lucene.util.StringUtilTest.java
/**
 * Run the String read(ClassLoader,String) method test.
 *
 * @throws Exception
 *
 * @generatedBy CodePro at 13.04.15 23:38
 */
@Test
public void testRead_5() throws Exception {
    ClassLoader classLoader = new URLClassLoader(new URL[] {});
    String name = "";
    String result = StringUtil.read(classLoader, name);
    // add additional test code here
    // An unexpected exception was thrown in user code while executing this test:
    //    java.lang.NullPointerException
    //       at java.io.Reader.<init>(Reader.java:78)
    //       at java.io.InputStreamReader.<init>(InputStreamReader.java:72)
    //       at com.aurel.track.lucene.util.StringUtil.read(StringUtil.java:216)
    //       at com.aurel.track.lucene.util.StringUtil.read(StringUtil.java:212)
    assertNotNull(result);
}
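As this test shows, a URLClassLoader over an empty URL array is still a working loader: it has nothing of its own to search, but delegates everything to its parent. The NullPointerException recorded in the comment presumably occurs because the resource lookup for the empty name returns null and the null stream reaches InputStreamReader. A small sketch of the delegation behavior:

import java.net.URL;
import java.net.URLClassLoader;

public class EmptyLoaderDemo {
    public static void main(String[] args) throws Exception {
        // No URLs of its own; the parent (the system class loader by default)
        // still handles class and resource lookups.
        ClassLoader empty = new URLClassLoader(new URL[] {});
        System.out.println(empty.loadClass("java.lang.String"));       // delegated to parent
        System.out.println(empty.getResourceAsStream("no/such/file")); // null
    }
}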
From source file: io.fabric8.forge.camel.commands.project.AbstractCamelProjectCommand.java
public static String loadComponentJSonSchema(Dependency dependency, String scheme) {
    String answer = null;
    String path = null;
    String javaType = extractComponentJavaType(dependency, scheme);
    if (javaType != null) {
        int pos = javaType.lastIndexOf(".");
        path = javaType.substring(0, pos);
        path = path.replace('.', '/');
        path = path + "/" + scheme + ".json";
    }
    if (path != null) {
        try {
            // is it a JAR file
            File file = dependency.getArtifact().getUnderlyingResourceObject();
            if (file != null && file.getName().toLowerCase().endsWith(".jar")) {
                URL url = new URL("file:" + file.getAbsolutePath());
                URLClassLoader child = new URLClassLoader(new URL[] { url });
                InputStream is = child.getResourceAsStream(path);
                if (is != null) {
                    answer = loadText(is);
                }
            }
        } catch (Throwable e) {
            // ignore
        }
    }
    return answer;
}
From source file: com.aurel.track.lucene.util.StringUtilTest.java
/**
 * Run the String read(ClassLoader,String) method test.
 *
 * @throws Exception
 *
 * @generatedBy CodePro at 13.04.15 23:38
 */
@Test
public void testRead_6() throws Exception {
    ClassLoader classLoader = new URLClassLoader(new URL[] {});
    String name = "";
    String result = StringUtil.read(classLoader, name);
    // add additional test code here
    // An unexpected exception was thrown in user code while executing this test:
    //    java.lang.NullPointerException
    //       at java.io.Reader.<init>(Reader.java:78)
    //       at java.io.InputStreamReader.<init>(InputStreamReader.java:72)
    //       at com.aurel.track.lucene.util.StringUtil.read(StringUtil.java:216)
    //       at com.aurel.track.lucene.util.StringUtil.read(StringUtil.java:212)
    assertNotNull(result);
}
From source file: io.fabric8.forge.camel.commands.project.AbstractCamelProjectCommand.java
public static String extractComponentJavaType(Dependency dependency, String scheme) {
    try {
        // is it a JAR file
        File file = dependency.getArtifact().getUnderlyingResourceObject();
        if (file != null && file.getName().toLowerCase().endsWith(".jar")) {
            URL url = new URL("file:" + file.getAbsolutePath());
            URLClassLoader child = new URLClassLoader(new URL[] { url });
            InputStream is = child.getResourceAsStream("META-INF/services/org/apache/camel/component/" + scheme);
            if (is != null) {
                Properties props = new Properties();
                props.load(is);
                return (String) props.get("class");
            }
        }
    } catch (Throwable e) {
        // ignore
    }
    return null;
}
From source file: com.streamsets.datacollector.cluster.TestShellClusterProvider.java
@Test
public void testYarnStreamingWithKerberos() throws Throwable {
    String id = "application_1429587312661_0025";
    MockSystemProcess.output.add(" " + id + " ");
    MockSystemProcess.output.add(" " + id + " ");
    System.setProperty("java.security.auth.login.config", "/etc/kafka-client-jaas.conf");
    File keytab = new File(resourcesDir, "sdc.keytab");
    keytab.createNewFile();
    String principal = "sdc/foohost";
    PipelineConfiguration pipelineConfKrb = pipelineConf
        .createWithNewConfig(new Config("kerberosPrincipal", "sdc"))
        .createWithNewConfig(new Config("kerberosKeytab", keytab.getAbsolutePath()));
    pipelineConfKrb.setPipelineInfo(new PipelineInfo("name", "label", "desc", null, null,
        "aaa", null, null, null, true, null, "x", "y"));
    URLClassLoader emptyCL = new URLClassLoader(new URL[0]);
    RuntimeInfo runtimeInfo = new StandaloneRuntimeInfo(SDC_TEST_PREFIX, null, Arrays.asList(emptyCL), tempDir);
    Configuration conf = new Configuration();
    conf.set(SecurityConfiguration.KERBEROS_ENABLED_KEY, true);
    conf.set(SecurityConfiguration.KERBEROS_KEYTAB_KEY, keytab.getAbsolutePath());
    conf.set(SecurityConfiguration.KERBEROS_PRINCIPAL_KEY, "sdc/foohost");
    sparkProvider = Mockito.spy(new ShellClusterProvider(runtimeInfo,
        new SecurityConfiguration(runtimeInfo, conf), conf, stageLibrary));
    Mockito.doReturn(new MockSystemProcessFactory()).when(sparkProvider).getSystemProcessFactory();
    Mockito.doReturn(ShellClusterProvider.CLUSTER_BOOTSTRAP_API_JAR_PATTERN).when(sparkProvider)
        .findClusterBootstrapJar(Mockito.eq(ExecutionMode.CLUSTER_BATCH),
            Mockito.any(PipelineConfiguration.class), Mockito.any(StageLibraryTask.class));
    Map<String, String> sourceInfoCopy = new HashMap<>(sourceInfo);
    sourceInfoCopy.put(ClusterModeConstants.EXTRA_KAFKA_CONFIG_PREFIX + "security.protocol", "SASL_PLAINTEXT");
    Assert.assertEquals(id, sparkProvider.startPipeline(providerTemp, sourceInfoCopy, pipelineConfKrb,
        pipelineConfigBean, stageLibrary, Mockito.mock(CredentialStoresTask.class), etcDir, resourcesDir,
        webDir, bootstrapLibDir, classLoader, classLoader, 60,
        new RuleDefinitions(PipelineStoreTask.RULE_DEFINITIONS_SCHEMA_VERSION,
            RuleDefinitionsConfigBean.VERSION, Collections.<MetricsRuleDefinition>emptyList(),
            Collections.<DataRuleDefinition>emptyList(), Collections.<DriftRuleDefinition>emptyList(),
            Collections.<String>emptyList(), UUID.randomUUID(), Collections.<Config>emptyList()),
        null, null, null).getAppId());
    Assert.assertArrayEquals(new String[] { "<masked>/libexec/_cluster-manager", "start",
        "--master", "yarn", "--deploy-mode", "cluster", "--executor-memory", "512m",
        "--executor-cores", "1", "--num-executors", "64", "--archives",
        "<masked>/provider-temp/staging/libs.tar.gz,<masked>/provider-temp/staging/etc.tar.gz,<masked>/provider-temp/staging/resources.tar.gz",
        "--files", "<masked>/provider-temp/staging/log4j.properties", "--jars",
        "<masked>/bootstrap-lib/main/streamsets-datacollector-bootstrap-1.7.0.0-SNAPSHOT.jar,"
            + "<masked>/bootstrap-lib/cluster/streamsets-datacollector-cluster-bootstrap-1.7.0.0-SNAPSHOT.jar",
        "--keytab", "<masked>/resources-src/sdc.keytab", "--principal", principal,
        "--conf", "spark.driver.extraJavaOptions=-Djava.security.auth.login.config=/etc/kafka-client-jaas.conf",
        "--conf", "spark.executor.extraJavaOptions="
            + "-javaagent:./streamsets-datacollector-bootstrap-1.7.0.0-SNAPSHOT.jar -Djava.security.auth.login.config=/etc/kafka-client-jaas.conf",
        "--conf", "a=b", "--name", "StreamSets Data Collector: label",
        "--class", "com.streamsets.pipeline.BootstrapClusterStreaming",
        "<masked>/bootstrap-lib/cluster/streamsets-datacollector-cluster-bootstrap-api-1.7.0.0-SNAPSHOT.jar" },
        MockSystemProcess.args.toArray());
}