Example usage for java.net URLClassLoader URLClassLoader

List of usage examples for java.net URLClassLoader URLClassLoader

Introduction

In this page you can find the example usage for java.net URLClassLoader URLClassLoader.

Prototype

URLClassLoader(URL[] urls, ClassLoader parent) 

Source Link

Usage

From source file:org.apache.nutch.webapp.common.PluginResourceLoader.java

/**
 * Collects the classpath URLs of every plugin implementing the UI extension
 * point and registers a single aggregating {@link URLClassLoader} for them.
 *
 * @param locator used to reach the plugin repository
 * @param parent  parent classloader for the aggregated loader (also used as
 *                the fallback when the extension point is missing)
 */
protected void init(ServiceLocator locator, ClassLoader parent) {

    // URLs already collected, to avoid adding the same jar twice.
    // NOTE: URL.equals/contains can trigger DNS lookups; acceptable for the
    // small number of plugin jars expected here.
    ArrayList<URL> seen = new ArrayList<URL>();
    // De-duplicated classpath entries, in first-seen order.
    ArrayList<URL> paths = new ArrayList<URL>();

    if (LOG.isDebugEnabled()) {
        LOG.debug("PluginResourceLoader : dynamically setting jars based "
                + "on plugins implementing UIExtensionPoint.");
    }

    ExtensionPoint point = locator.getPluginRepository().getExtensionPoint(UIExtensionPoint.X_POINT_ID);

    if (point == null) {
        // No UI plugins available: fall back to the parent loader alone.
        LOG.info("Can't find extension point '" + UIExtensionPoint.X_POINT_ID + "'");
        classloaders.add(parent);
        return;
    }

    Extension[] extensions = point.getExtensions();

    for (int i = 0; i < extensions.length; i++) {
        Extension extension = extensions[i];

        PluginClassLoader loader = extension.getDescriptor().getClassLoader();

        URL[] urls = loader.getURLs();

        for (int k = 0; k < urls.length; k++) {
            URL url = urls[k];
            if (!seen.contains(url)) {
                paths.add(url);
                LOG.debug("Adding to classpath:" + url);
                // Record only unseen URLs; adding unconditionally would grow
                // "seen" with duplicates and slow down contains().
                seen.add(url);
            }
        }
    }

    URL[] urls = paths.toArray(new URL[paths.size()]);
    classloaders.add(new URLClassLoader(urls, parent));

}

From source file:com.avego.oauth.migration.OauthDataMigratorTest.java

private void addTestPrincipalClass(OauthDataMigrator migrator) throws MalformedURLException {
    // Expose the principal class used by this test to the migrator's
    // deserialization by chaining a loader for the test-classes directory
    // onto the migrator's existing deserialization classloader.
    URL testClassesUrl = getTestClassesDir().toURI().toURL();
    ClassLoader existing = migrator.getDeserialisationClassLoader();
    migrator.setDeserialisationClassLoader(new URLClassLoader(new URL[] { testClassesUrl }, existing));
}

From source file:org.ebayopensource.turmeric.runtime.tests.common.jetty.AbstractWithProxyServerTest.java

/**
 * Create a proxied service that uses the embedded jetty proxy server + embedded jetty SPF server.
 * <p>
 * The process here is:
 * <p>
 * <ol>
 * <li>Read the raw ClientConfig.xml as specified by the clientName parameter</li>
 * <li>Updates the <code>PROXY_HOST</code> and <code>PROXY_PORT</code> values present in the raw ClientConfig.xml</li>
 * <li>Write the modified ClientConfig.xml back out to a test specific directory (
 * <code>target/tests/{testClassName}/{testMethodName}/res/</code>) using the path
 * <code>META-INF/soa/client/config/{clientName}_modified/ClientConfig.xml</code></li>
 * <li>Using a test local classloader, and the {@link ServiceFactory#create(String, String, URL)} method to load the
 * modified ClientConfig.xml to create a {@link Service} suitable for talking to the embedded jetty proxy server.</li>
 * </ol>
 * 
 * @param serviceAdminName
 *            the service admin name.
 * @param clientName
 *            the client name.
 * @return the {@link Service} configured to route through the embedded jetty proxy server
 * @throws Exception
 *             if the baseline config cannot be found/parsed, rewritten, or the service cannot be created
 */
protected Service createProxiedService(String serviceAdminName, String clientName) throws Exception {
    // Read/Parse baseline ClientConfig.xml
    String rawConfigPath = String.format("META-INF/soa/client/config/%s/ClientConfig.xml", clientName);
    URL rawConfigUrl = ClassLoader.getSystemResource(rawConfigPath);
    Assert.assertThat("Unable to find config resource: " + rawConfigPath, rawConfigUrl, notNullValue());

    Document doc = parseXml(rawConfigUrl);

    // Modify the PROXY_HOST and PROXY_PORT to point to embedded jetty server
    // (matched by the "name" attribute of each <option> element)
    XPath expression = new JDOMXPath("//e:default-options/e:other-options/e:option");
    expression.addNamespace("e", "http://www.ebayopensource.org/turmeric/common/config");

    @SuppressWarnings("unchecked")
    List<Element> nodes = expression.selectNodes(doc);
    for (Element elem : nodes) {
        String optName = elem.getAttributeValue("name").trim();
        if ("PROXY_HOST".equals(optName)) {
            elem.setText(proxyUri.getHost());
        } else if ("PROXY_PORT".equals(optName)) {
            elem.setText(String.valueOf(proxyUri.getPort()));
        }
    }

    // Write modified document out to target directory
    File testingResourceDir = new File(testingdir.getDir(), "res");
    MavenTestingUtils.ensureEmpty(testingResourceDir);
    String modifiedConfigPath = String.format("META-INF/soa/client/config/%s_modified/ClientConfig.xml",
            clientName);
    File outputFile = new File(testingResourceDir, modifiedConfigPath);
    MavenTestingUtils.ensureDirExists(outputFile.getParentFile());

    System.out.println("Writing modified ClientConfig to " + outputFile.getAbsolutePath());
    writeXml(outputFile, doc);

    // Let ServiceFactory create service from modified ClientConfig.xml
    URL urls[] = new URL[] { testingResourceDir.toURI().toURL() };
    URLClassLoader testingSpecificCL = new URLClassLoader(urls, this.getClass().getClassLoader());

    // Swap in the test-local classloader only for the duration of the
    // ServiceFactory call, and always restore the original afterwards.
    ClassLoader original = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(testingSpecificCL);
        return ServiceFactory.create(serviceAdminName, clientName + "_modified", null);
    } finally {
        Thread.currentThread().setContextClassLoader(original);
    }
}

From source file:org.godhuli.rhipe.RHMR.java

/**
 * Copies the RHIPE job options into the Hadoop configuration and registers
 * shared files, jars and zip archives with the distributed cache; also
 * prepends any user-specified classpath entries to the configuration's and
 * the current thread's classloaders.
 *
 * @throws IOException        if a classpath entry cannot be converted to a URL
 * @throws URISyntaxException if a cached file path is not a valid URI
 */
public void setConf() throws IOException, URISyntaxException {
    // Copy every RHIPE option verbatim into the Hadoop Configuration.
    Enumeration keys = rhoptions_.keys();
    while (keys.hasMoreElements()) {
        String key = (String) keys.nextElement();
        String value = (String) rhoptions_.get(key);
        config_.set(key, value);
    }
    REXPHelper.setFieldSep(config_.get("mapred.field.separator", " "));

    // Configuration.get(key) returns null for absent keys, so guard BEFORE
    // splitting: String.split never returns null, which made the original
    // post-split null checks dead code while leaving an NPE on the get().
    String sharedProp = config_.get("rhipe_shared");
    if (sharedProp != null) {
        for (String p : sharedProp.split(","))
            if (p.length() > 1)
                DistributedCache.addCacheFile(new URI(p), config_);
    }
    String jarProp = config_.get("rhipe_jarfiles");
    if (jarProp != null) {
        for (String p : jarProp.split(",")) {
            if (p.length() > 1)
                DistributedCache.addArchiveToClassPath(new Path(p), config_);
        }
    }
    String zipProp = config_.get("rhipe_zips");
    if (zipProp != null) {
        for (String p : zipProp.split(",")) {
            if (p.length() > 1)
                DistributedCache.addCacheArchive(new URI(p), config_);
        }
    }

    DistributedCache.createSymlink(config_);
    // Treat a missing "rhipe_classpaths" option like an empty one instead of
    // throwing an NPE on the equals() call.
    String classpaths = rhoptions_.get("rhipe_classpaths");
    if (classpaths != null && !classpaths.equals("")) {
        String[] cps = classpaths.split(",");
        URL[] us = new URL[cps.length];
        for (int i = 0; i < cps.length; i++) {
            try {
                us[i] = (new File(cps[i])).toURI().toURL();
            } catch (java.net.MalformedURLException e) {
                throw new IOException(e);
            }
        }
        config_.setClassLoader(new URLClassLoader(us, config_.getClassLoader()));
        Thread.currentThread()
                .setContextClassLoader(new URLClassLoader(us, Thread.currentThread().getContextClassLoader()));
    }

}

From source file:org.grouplens.lenskit.eval.cli.EvalCLIOptions.java

public ClassLoader getClassLoader(ClassLoader parent) {
    // No extra classpath entries were configured: the parent suffices.
    if (classpathUrls == null) {
        return parent;
    }
    // Layer the configured URLs on top of the supplied parent loader.
    return new URLClassLoader(classpathUrls, parent);
}

From source file:org.fusesource.meshkeeper.classloader.basic.BasicClassLoaderFactory.java

/**
 * Builds a {@link URLClassLoader} mirroring the remote classloader identified
 * by {@code id}: path elements that already carry a URL are reused directly,
 * while the rest are downloaded into {@code cacheDir}, cached under a
 * fingerprint-derived file name, and verified before use.
 *
 * @param parent   parent for the new classloader
 * @param cacheDir local directory used to cache downloaded jars
 * @return a classloader exposing the remote path elements
 * @throws Exception if the remote path cannot be imported, downloaded, or a
 *                   cached jar fails fingerprint verification
 */
public ClassLoader createClassLoader(ClassLoader parent, File cacheDir) throws Exception {

    List<BasicClassLoaderServer.PathElement> elements = server.getPathElements(id);
    if (elements == null) {
        // That classloader was not URL classloader based, so we could not import it
        // by downloading it's jars.. we will have to use dynamically.
        throw new IOException("Unexpected Remote Response");
    }

    // We can build a standard URLClassLoader by downloading all the
    // jars or using the same URL elements as the original classloader.
    ArrayList<URL> urls = new ArrayList<URL>();
    for (BasicClassLoaderServer.PathElement element : elements) {

        if (element.url != null) {
            urls.add(element.url);
        } else {

            cacheDir.mkdirs();
            // Derive a cache file name that embeds the content fingerprint so
            // different versions of the "same" jar never collide.
            String name = "";
            if (element.name != null) {
                if (element.name.indexOf(".") > 0) {
                    String suffix = element.name.substring(element.name.lastIndexOf("."));
                    String prefix = element.name.substring(0, element.name.lastIndexOf("."));
                    name = prefix + "_" + HexSupport.toHexFromBytes(element.fingerprint) + suffix;
                } else {
                    name = HexSupport.toHexFromBytes(element.fingerprint) + "_" + element.name;
                }
            } else {
                name = HexSupport.toHexFromBytes(element.fingerprint) + ".jar";
            }
            File file = new File(cacheDir, name);

            if (!file.exists()) {
                LOG.debug("Downloading: " + file);
                // We need to download it in CHUNK_SIZE pieces; a short read
                // signals the end of the remote file.
                File tmp = null;
                FileOutputStream out = null;
                try {
                    tmp = File.createTempFile(name, ".part", cacheDir);
                    out = new FileOutputStream(tmp);
                    int pos = 0;
                    while (true) {
                        byte[] data = server.download(element.id, pos, CHUNK_SIZE);
                        out.write(data);
                        if (data.length < CHUNK_SIZE) {
                            break;
                        }
                        pos += CHUNK_SIZE;
                    }
                } finally {
                    // Guard against "out" never having been opened
                    // (createTempFile or the constructor may have thrown).
                    if (out != null) {
                        try {
                            out.close();
                        } catch (Throwable e) {
                        }
                    }
                }
                if (!tmp.renameTo(file)) {
                    // Another process may have won the race; drop our copy.
                    tmp.delete();
                }
            }

            // It may be in the cache dir already...
            if (file.exists()) {
                // Verify the cached copy; close the stream explicitly instead
                // of leaking one file descriptor per verified jar.
                FileInputStream in = new FileInputStream(file);
                boolean matches;
                try {
                    matches = Arrays.equals(element.fingerprint, fingerprint(in));
                } finally {
                    try {
                        in.close();
                    } catch (Throwable e) {
                    }
                }
                if (!matches || element.length != file.length()) {
                    throw new IOException("fingerprint missmatch: " + name);
                }

                urls.add(file.toURI().toURL());
            } else {
                throw new IOException("Could not download: " + name);
            }
        }
    }

    URL t[] = new URL[urls.size()];
    urls.toArray(t);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Created URL class loader with: " + urls);
    }
    return new URLClassLoader(t, parent) {

        @Override
        protected Class<?> findClass(String name) throws ClassNotFoundException {
            try {
                return super.findClass(name);
            } catch (ClassNotFoundException e) {
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Couldn't find class: " + name);
                }
                throw e;
            }
        }

        @Override
        protected Class<?> loadClass(String name, boolean resolveIt) throws ClassNotFoundException {
            Class<?> c = super.loadClass(name, resolveIt);
            if (c != null)
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Loaded class: " + c.getName());
                }
            return c;
        }
    };
}

From source file:com.ikanow.infinit.e.processing.custom.launcher.CustomHadoopTaskLauncher.java

/**
 * Configures and submits (or, in local/test mode, runs and waits for) the
 * Hadoop job described by {@code job}, loading the user's map/reduce classes
 * from the supplied jar via a dedicated classloader.
 *
 * @param job             the custom map/reduce job description
 * @param tempJarLocation path to the user jar containing the job classes
 * @return the Hadoop job id when submitted remotely; "local_done" (optionally
 *         with accumulated errors) or an "Error: ..." string in local/test mode
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public String runHadoopJob(CustomMapReduceJobPojo job, String tempJarLocation)
        throws IOException, SAXException, ParserConfigurationException {
    StringWriter xml = new StringWriter();
    String outputCollection = job.outputCollectionTemp;// (non-append mode) 
    if ((null != job.appendResults) && job.appendResults)
        outputCollection = job.outputCollection; // (append mode, write directly in....)
    else if (null != job.incrementalMode)
        job.incrementalMode = false; // (not allowed to be in incremental mode and not update mode)

    createConfigXML(xml, job.jobtitle, job.inputCollection,
            InfiniteHadoopUtils.getQueryOrProcessing(job.query, InfiniteHadoopUtils.QuerySpec.INPUTFIELDS),
            job.isCustomTable, job.getOutputDatabase(), job._id.toString(), outputCollection, job.mapper,
            job.reducer, job.combiner,
            InfiniteHadoopUtils.getQueryOrProcessing(job.query, InfiniteHadoopUtils.QuerySpec.QUERY),
            job.communityIds, job.outputKey, job.outputValue, job.arguments, job.incrementalMode,
            job.submitterID, job.selfMerge, job.outputCollection, job.appendResults);

    ClassLoader savedClassLoader = Thread.currentThread().getContextClassLoader();

    URLClassLoader child = new URLClassLoader(new URL[] { new File(tempJarLocation).toURI().toURL() },
            savedClassLoader);
    Thread.currentThread().setContextClassLoader(child);

    // Check version: for now, any infinit.e.data_model with an VersionTest class is acceptable
    boolean dataModelLoaded = true;
    try {
        // (parentless loader so only classes physically in the jar are found)
        URLClassLoader versionTest = new URLClassLoader(new URL[] { new File(tempJarLocation).toURI().toURL() },
                null);
        try {
            Class.forName("com.ikanow.infinit.e.data_model.custom.InfiniteMongoInputFormat", true, versionTest);
        } catch (ClassNotFoundException e2) {
            //(this is fine, will use the cached version)
            dataModelLoaded = false;
        }
        if (dataModelLoaded)
            Class.forName("com.ikanow.infinit.e.data_model.custom.InfiniteMongoVersionTest", true, versionTest);
    } catch (ClassNotFoundException e1) {
        throw new RuntimeException(
                "This JAR is compiled with too old a version of the data-model, please recompile with Jan 2014 (rc2) onwards");
    }

    // Now load the XML into a configuration object: 
    Configuration config = new Configuration();
    // Add the client configuration overrides:
    if (!bLocalMode) {
        String hadoopConfigPath = props_custom.getHadoopConfigPath() + "/hadoop/";
        config.addResource(new Path(hadoopConfigPath + "core-site.xml"));
        config.addResource(new Path(hadoopConfigPath + "mapred-site.xml"));
        config.addResource(new Path(hadoopConfigPath + "hadoop-site.xml"));
    } //TESTED

    try {
        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
        Document doc = dBuilder.parse(new ByteArrayInputStream(xml.toString().getBytes()));
        NodeList nList = doc.getElementsByTagName("property");

        for (int temp = 0; temp < nList.getLength(); temp++) {
            Node nNode = nList.item(temp);
            if (nNode.getNodeType() == Node.ELEMENT_NODE) {
                Element eElement = (Element) nNode;
                String name = getTagValue("name", eElement);
                String value = getTagValue("value", eElement);
                if ((null != name) && (null != value)) {
                    config.set(name, value);
                }
            }
        }
    } catch (Exception e) {
        // Preserve the cause so XML/config failures remain diagnosable.
        throw new IOException(e.getMessage(), e);
    }

    // Some other config defaults:
    // (not sure if these are actually applied, or derived from the defaults - for some reason they don't appear in CDH's client config)
    config.set("mapred.map.tasks.speculative.execution", "false");
    config.set("mapred.reduce.tasks.speculative.execution", "false");
    // (default security is ignored here, have it set via HADOOP_TASKTRACKER_CONF in cloudera)

    // Now run the JAR file
    try {
        BasicDBObject advancedConfigurationDbo = null;
        try {
            advancedConfigurationDbo = (null != job.query)
                    ? ((BasicDBObject) com.mongodb.util.JSON.parse(job.query))
                    : (new BasicDBObject());
        } catch (Exception e) {
            advancedConfigurationDbo = new BasicDBObject();
        }
        boolean esMode = advancedConfigurationDbo.containsField("qt") && !job.isCustomTable;
        if (esMode && !job.inputCollection.equals("doc_metadata.metadata")) {
            throw new RuntimeException(
                    "Infinit.e Queries are only supported on doc_metadata - use MongoDB queries instead.");
        }

        config.setBoolean("mapred.used.genericoptionsparser", true); // (just stops an annoying warning from appearing)
        if (bLocalMode) { // local job tracker and FS mode
            config.set("mapred.job.tracker", "local");
            config.set("fs.default.name", "local");
        } else {
            if (bTestMode) { // run job tracker locally but FS mode remotely
                config.set("mapred.job.tracker", "local");
            } else { // normal job tracker
                String trackerUrl = HadoopUtils.getXMLProperty(
                        props_custom.getHadoopConfigPath() + "/hadoop/mapred-site.xml", "mapred.job.tracker");
                config.set("mapred.job.tracker", trackerUrl);
            }
            String fsUrl = HadoopUtils.getXMLProperty(
                    props_custom.getHadoopConfigPath() + "/hadoop/core-site.xml", "fs.default.name");
            config.set("fs.default.name", fsUrl);
        }
        if (!dataModelLoaded && !(bTestMode || bLocalMode)) { // If running distributed and no data model loaded then add ourselves
            Path jarToCache = InfiniteHadoopUtils.cacheLocalFile("/opt/infinite-home/lib/",
                    "infinit.e.data_model.jar", config);
            DistributedCache.addFileToClassPath(jarToCache, config);
            jarToCache = InfiniteHadoopUtils.cacheLocalFile("/opt/infinite-home/lib/",
                    "infinit.e.processing.custom.library.jar", config);
            DistributedCache.addFileToClassPath(jarToCache, config);
        } //TESTED

        // Debug scripts (only if they exist), and only in non local/test mode
        if (!bLocalMode && !bTestMode) {

            try {
                Path scriptToCache = InfiniteHadoopUtils.cacheLocalFile("/opt/infinite-home/scripts/",
                        "custom_map_error_handler.sh", config);
                config.set("mapred.map.task.debug.script", "custom_map_error_handler.sh " + job.jobtitle);
                config.set("mapreduce.map.debug.script", "custom_map_error_handler.sh " + job.jobtitle);
                DistributedCache.createSymlink(config);
                DistributedCache.addCacheFile(scriptToCache.toUri(), config);
            } catch (Exception e) {
            } // just carry on

            try {
                Path scriptToCache = InfiniteHadoopUtils.cacheLocalFile("/opt/infinite-home/scripts/",
                        "custom_reduce_error_handler.sh", config);
                config.set("mapred.reduce.task.debug.script", "custom_reduce_error_handler.sh " + job.jobtitle);
                config.set("mapreduce.reduce.debug.script", "custom_reduce_error_handler.sh " + job.jobtitle);
                DistributedCache.createSymlink(config);
                DistributedCache.addCacheFile(scriptToCache.toUri(), config);
            } catch (Exception e) {
            } // just carry on

        } //TODO (???): TOTEST

        // (need to do these 2 things here before the job is created, at which point the config class has been copied across)
        //1)
        Class<?> mapperClazz = Class.forName(job.mapper, true, child);
        if (ICustomInfiniteInternalEngine.class.isAssignableFrom(mapperClazz)) { // Special case: internal custom engine, so gets an additional integration hook
            ICustomInfiniteInternalEngine preActivities = (ICustomInfiniteInternalEngine) mapperClazz
                    .newInstance();
            preActivities.preTaskActivities(job._id, job.communityIds, config, !(bTestMode || bLocalMode));
        } //TESTED
          //2)
        if (job.inputCollection.equalsIgnoreCase("file.binary_shares")) {
            // Need to download the GridFSZip file
            try {
                Path jarToCache = InfiniteHadoopUtils.cacheLocalFile("/opt/infinite-home/lib/unbundled/",
                        "GridFSZipFile.jar", config);
                DistributedCache.addFileToClassPath(jarToCache, config);
            } catch (Throwable t) {
            } // (this is fine, will already be on the classpath .. otherwise lots of other stuff will be failing all over the place!)            
        }

        if (job.inputCollection.equals("records")) {

            InfiniteElasticsearchHadoopUtils.handleElasticsearchInput(job, config, advancedConfigurationDbo);

            //(won't run under 0.19 so running with "records" should cause all sorts of exceptions)

        } //TESTED (by hand)         

        if (bTestMode || bLocalMode) { // If running locally, turn "snappy" off - tomcat isn't pointing its native library path in the right place
            config.set("mapred.map.output.compression.codec", "org.apache.hadoop.io.compress.DefaultCodec");
        }

        // Manually specified caches
        List<URL> localJarCaches = InfiniteHadoopUtils.handleCacheList(advancedConfigurationDbo.get("$caches"),
                job, config, props_custom);

        Job hj = new Job(config); // (NOTE: from here, changes to config are ignored)
        try {

            if (null != localJarCaches) {
                if (bLocalMode || bTestMode) {
                    Method method = URLClassLoader.class.getDeclaredMethod("addURL", new Class[] { URL.class });
                    method.setAccessible(true);
                    // addURL takes a single URL, so invoke it once per cached
                    // jar. (Passing localJarCaches.toArray() directly to
                    // invoke() would be unpacked as the reflective argument
                    // list and throw IllegalArgumentException for any list
                    // with more than one element.)
                    for (URL localJarCache : localJarCaches) {
                        method.invoke(child, localJarCache);
                    }

                } //TOTEST (tested logically)
            }
            Class<?> classToLoad = Class.forName(job.mapper, true, child);
            hj.setJarByClass(classToLoad);

            if (job.inputCollection.equalsIgnoreCase("filesystem")) {
                String inputPath = null;
                try {
                    inputPath = MongoDbUtil.getProperty(advancedConfigurationDbo, "file.url");
                    if (!inputPath.endsWith("/")) {
                        inputPath = inputPath + "/";
                    }
                } catch (Exception e) {
                }
                if (null == inputPath) {
                    throw new RuntimeException("Must specify 'file.url' if reading from filesystem.");
                }
                inputPath = InfiniteHadoopUtils.authenticateInputDirectory(job, inputPath);

                InfiniteFileInputFormat.addInputPath(hj, new Path(inputPath + "*/*")); // (that extra bit makes it recursive)
                InfiniteFileInputFormat.setMaxInputSplitSize(hj, 33554432); // (32MB)
                InfiniteFileInputFormat.setInfiniteInputPathFilter(hj, config);
                hj.setInputFormatClass((Class<? extends InputFormat>) Class.forName(
                        "com.ikanow.infinit.e.data_model.custom.InfiniteFileInputFormat", true, child));
            } else if (job.inputCollection.equalsIgnoreCase("file.binary_shares")) {

                String[] oidStrs = null;
                try {
                    String inputPath = MongoDbUtil.getProperty(advancedConfigurationDbo, "file.url");
                    Pattern oidExtractor = Pattern.compile("inf://share/([^/]+)");
                    Matcher m = oidExtractor.matcher(inputPath);
                    if (m.find()) {
                        oidStrs = m.group(1).split("\\s*,\\s*");

                    } else {
                        throw new RuntimeException(
                                "file.url must be in format inf://share/<oid-list>/<string>: " + inputPath);
                    }
                    InfiniteHadoopUtils.authenticateShareList(job, oidStrs);
                } catch (Exception e) {
                    throw new RuntimeException(
                            "Authentication error: " + e.getMessage() + ": " + advancedConfigurationDbo, e);
                }

                hj.getConfiguration().setStrings("mapred.input.dir", oidStrs);
                hj.setInputFormatClass((Class<? extends InputFormat>) Class.forName(
                        "com.ikanow.infinit.e.data_model.custom.InfiniteShareInputFormat", true, child));
            } else if (job.inputCollection.equals("records")) {
                hj.setInputFormatClass((Class<? extends InputFormat>) Class
                        .forName("com.ikanow.infinit.e.data_model.custom.InfiniteEsInputFormat", true, child));
            } else {
                if (esMode) {
                    hj.setInputFormatClass((Class<? extends InputFormat>) Class.forName(
                            "com.ikanow.infinit.e.processing.custom.utils.InfiniteElasticsearchMongoInputFormat",
                            true, child));
                } else {
                    hj.setInputFormatClass((Class<? extends InputFormat>) Class.forName(
                            "com.ikanow.infinit.e.data_model.custom.InfiniteMongoInputFormat", true, child));
                }
            }
            if ((null != job.exportToHdfs) && job.exportToHdfs) {

                //TODO (INF-2469): Also, if the output key is BSON then also run as text (but output as JSON?)

                Path outPath = InfiniteHadoopUtils.ensureOutputDirectory(job, props_custom);

                if ((null != job.outputKey) && (null != job.outputValue)
                        && job.outputKey.equalsIgnoreCase("org.apache.hadoop.io.text")
                        && job.outputValue.equalsIgnoreCase("org.apache.hadoop.io.text")) {
                    // (slight hack before I sort out the horrendous job class - if key/val both text and exporting to HDFS then output as Text)
                    hj.setOutputFormatClass((Class<? extends OutputFormat>) Class
                            .forName("org.apache.hadoop.mapreduce.lib.output.TextOutputFormat", true, child));
                    TextOutputFormat.setOutputPath(hj, outPath);
                } //TESTED
                else {
                    hj.setOutputFormatClass((Class<? extends OutputFormat>) Class.forName(
                            "org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat", true, child));
                    SequenceFileOutputFormat.setOutputPath(hj, outPath);
                } //TESTED
            } else { // normal case, stays in MongoDB
                hj.setOutputFormatClass((Class<? extends OutputFormat>) Class.forName(
                        "com.ikanow.infinit.e.data_model.custom.InfiniteMongoOutputFormat", true, child));
            }
            hj.setMapperClass((Class<? extends Mapper>) mapperClazz);
            String mapperOutputKeyOverride = advancedConfigurationDbo.getString("$mapper_key_class", null);
            if (null != mapperOutputKeyOverride) {
                hj.setMapOutputKeyClass(Class.forName(mapperOutputKeyOverride));
            } //TESTED 

            String mapperOutputValueOverride = advancedConfigurationDbo.getString("$mapper_value_class", null);
            if (null != mapperOutputValueOverride) {
                hj.setMapOutputValueClass(Class.forName(mapperOutputValueOverride));
            } //TESTED 

            if ((null != job.reducer) && !job.reducer.startsWith("#") && !job.reducer.equalsIgnoreCase("null")
                    && !job.reducer.equalsIgnoreCase("none")) {
                hj.setReducerClass((Class<? extends Reducer>) Class.forName(job.reducer, true, child));
                // Variable reducers:
                if (null != job.query) {
                    try {
                        hj.setNumReduceTasks(advancedConfigurationDbo.getInt("$reducers", 1));
                    } catch (Exception e) {
                        try {
                            // (just check it's not a string that is a valid int)
                            hj.setNumReduceTasks(
                                    Integer.parseInt(advancedConfigurationDbo.getString("$reducers", "1")));
                        } catch (Exception e2) {
                        }
                    }
                } //TESTED
            } else {
                hj.setNumReduceTasks(0);
            }
            if ((null != job.combiner) && !job.combiner.startsWith("#")
                    && !job.combiner.equalsIgnoreCase("null") && !job.combiner.equalsIgnoreCase("none")) {
                hj.setCombinerClass((Class<? extends Reducer>) Class.forName(job.combiner, true, child));
            }
            hj.setOutputKeyClass(Class.forName(job.outputKey, true, child));
            hj.setOutputValueClass(Class.forName(job.outputValue, true, child));

            hj.setJobName(job.jobtitle);
            currJobName = job.jobtitle;
        } catch (Error e) { // (messing about with class loaders = lots of chances for errors!)
            throw new RuntimeException(e.getMessage(), e);
        }
        if (bTestMode || bLocalMode) {
            hj.submit();
            currThreadId = null;
            Logger.getRootLogger().addAppender(this);
            currLocalJobId = hj.getJobID().toString();
            currLocalJobErrs.setLength(0);
            while (!hj.isComplete()) {
                Thread.sleep(1000);
            }
            Logger.getRootLogger().removeAppender(this);
            if (hj.isSuccessful()) {
                if (this.currLocalJobErrs.length() > 0) {
                    return "local_done: " + this.currLocalJobErrs.toString();
                } else {
                    return "local_done";
                }
            } else {
                return "Error: " + this.currLocalJobErrs.toString();
            }
        } else {
            hj.submit();
            String jobId = hj.getJobID().toString();
            return jobId;
        }
    } catch (Exception e) {
        e.printStackTrace();
        Thread.currentThread().setContextClassLoader(savedClassLoader);
        return "Error: " + InfiniteHadoopUtils.createExceptionMessage(e);
    } finally {
        // Always restore the caller's context classloader.
        Thread.currentThread().setContextClassLoader(savedClassLoader);
    }
}

From source file:org.apache.accumulo.start.classloader.AccumuloClassLoader.java

public static synchronized ClassLoader getClassLoader() throws IOException {
    // Lazily build the loader once; subsequent calls reuse the cached one.
    if (classloader != null) {
        return classloader;
    }

    ArrayList<URL> urls = findAccumuloURLs();
    ClassLoader parent = AccumuloClassLoader.class.getClassLoader();

    log.debug("Create 2nd tier ClassLoader using URLs: {}", urls);
    classloader = new URLClassLoader(urls.toArray(new URL[urls.size()]), parent) {
        @Override
        protected synchronized Class<?> loadClass(String name, boolean resolve)
                throws ClassNotFoundException {
            // For the vfs classloader package, attempt to define the class in
            // this loader first (ignoring failures) so that the subsequent
            // super.loadClass resolves it here rather than in the parent.
            if (name.startsWith("org.apache.accumulo.start.classloader.vfs") && findLoadedClass(name) == null) {
                try {
                    findClass(name);
                } catch (ClassNotFoundException e) {
                    // fall through to normal parent delegation
                }
            }
            return super.loadClass(name, resolve);
        }
    };

    return classloader;
}

From source file:com.agileapes.couteau.maven.mojo.AbstractPluginExecutor.java

/**
 * This method will try to get a class loader with access to the target project's classes and dependencies
 *
 * @return the class loader (falls back to this class's own loader if the
 *         project classpath cannot be resolved)
 */
public ClassLoader getProjectClassLoader() {
    if (projectClassLoader != null) {
        return projectClassLoader;
    }
    ClassLoader projectClassLoader;
    try {
        // Collect the filesystem paths of every accepted dependency artifact
        // plus the project's own compiled output directory.
        List<String> classpathElements = new ArrayList<String>();
        final Set dependencyArtifacts = getProject().getDependencyArtifacts();
        for (Object object : dependencyArtifacts) {
            if (object instanceof Artifact) {
                Artifact artifact = (Artifact) object;
                boolean add = true;
                for (Filter<Artifact> filter : artifactFilters) {
                    if (!filter.accepts(artifact)) {
                        add = false;
                        break;
                    }
                }
                if (add) {
                    classpathElements.add(artifact.getFile().getAbsolutePath());
                }
            }
        }
        classpathElements.add(getProject().getBuild().getOutputDirectory());
        URL[] urls = new URL[classpathElements.size()];
        for (int i = 0; i < classpathElements.size(); ++i) {
            urls[i] = new File(classpathElements.get(i)).toURI().toURL();
        }
        projectClassLoader = new URLClassLoader(urls, this.getClass().getClassLoader());
    } catch (Exception e) {
        // Log the cause so classpath problems are diagnosable; previously the
        // exception was silently discarded.
        getLog().warn("Failed to setup project class loader", e);
        projectClassLoader = this.getClass().getClassLoader();
    }
    // Cache and wrap the result so later calls return the same instance.
    return this.projectClassLoader = new ConfigurableClassLoader(projectClassLoader);
}

From source file:com.norconex.commons.lang.ClassFinder.java

private static ClassLoader getClassLoader(File url) {
    try {
        // Expose the given file/directory to a fresh loader chained onto
        // this class's own loader.
        return new URLClassLoader(new URL[] { url.toURI().toURL() }, ClassFinder.class.getClassLoader());
    } catch (MalformedURLException e) {
        LOG.error("Invalid classpath: " + url, e);
        return null;
    }
}