Example usage for org.apache.hadoop.conf Configuration setClassLoader

List of usage examples for org.apache.hadoop.conf Configuration setClassLoader

Introduction

On this page you can find example usage for org.apache.hadoop.conf Configuration setClassLoader.

Prototype

public void setClassLoader(ClassLoader classLoader) 

Document

Set the class loader that will be used to load the various objects.
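
A minimal, self-contained sketch of the pattern that recurs in the examples below: chain a new URLClassLoader onto the configuration's existing loader and install it with setClassLoader, so classes named in the configuration can also be resolved from extra jars. The jar path here is only a placeholder and is not taken from any of the listed projects.

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;

import org.apache.hadoop.conf.Configuration;

public class SetClassLoaderExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // Placeholder jar; in real code this usually comes from -libjars or a job jar.
        URL[] extraJars = { new File("/path/to/extra.jar").toURI().toURL() };

        // Chain a new loader onto the configuration's current one so the existing
        // resolution behaviour is preserved and the extra URLs are searched as well.
        ClassLoader chained = new URLClassLoader(extraJars, conf.getClassLoader());
        conf.setClassLoader(chained);

        // Classes looked up through the configuration (e.g. getClassByName)
        // now resolve against the chained loader.
        System.out.println(conf.getClassLoader());
    }
}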

Usage

From source file:eu.stratosphere.yarn.Utils.java

License:Apache License

private static void addPathToConfig(Configuration conf, File path) {
    // chain-in a new classloader
    URL fileUrl = null;
    try {
        fileUrl = path.toURL();
    } catch (MalformedURLException e) {
        throw new RuntimeException("Erroneous config file path", e);
    }
    URL[] urls = { fileUrl };
    ClassLoader cl = new URLClassLoader(urls, conf.getClassLoader());
    conf.setClassLoader(cl);
}

From source file:fi.tkk.ics.hadoop.bam.cli.Frontend.java

License:Open Source License

public static void main(String[] args) {

    final Thread thread = Thread.currentThread();

    // This naming scheme should become clearer below.
    final URLClassLoader loader2 = (URLClassLoader) thread.getContextClassLoader();

    // Parse Hadoop's generic options first, so that -libjars is handled
    // before we try to load plugins.
    final GenericOptionsParser parser;
    try {
        parser = new GenericOptionsParser(args);

        // This should be IOException but Hadoop 0.20.2 doesn't throw it...
    } catch (Exception e) {
        System.err.printf("Error in Hadoop arguments: %s\n", e.getMessage());
        System.exit(1);

        // Hooray for javac
        return;
    }

    args = parser.getRemainingArgs();
    //final Configuration conf = ContextUtil.getConfiguration(parser);
    final Configuration conf = parser.getConfiguration();

    final URLClassLoader loader1 = (URLClassLoader) thread.getContextClassLoader();

    if (loader1 != loader2) {
        /* Set the thread's context class loader to a new one that includes
         * the URLs of both the current one and its parent. Replace those two
         * completely: have the new one delegate to the current one's
         * grandparent.
         *
         * This is necessary to support Hadoop's "-libjars" argument because
         * of the way Hadoop's "hadoop jar" command works: it doesn't handle
         * "-libjars" or anything like it, instead the GenericOptionsParser we
         * use above does. Since URLs can't be added to class loaders,
         * GenericOptionsParser creates a new loader, adds the paths given via
         * "-libjars" to it, and makes it delegate to the loader created by
         * "hadoop jar". So the class loader setup at this point looks like
         * the following:
         *
         * 1. The loader that knows about the "-libjars" parameters.
         * 2. The loader that knows about "hadoop jar"'s parameter: the jar we
         *    are running.
         * 3. The system class loader (I think), which was created when the
         *    "hadoop" script ran "java"; it knows about the main Hadoop jars,
         *    everything in HADOOP_CLASSPATH, etc.
         *
         * Here 3 is 2's parent and 2 is 1's parent. The result is that when
         * loading our own plugins, we end up finding them in 2, of course.
         * But if Picard was given in "-libjars", 2 can't see the dependencies
         * of those plugins, because they're not visible to it or its parents,
         * and thus throws a NoClassDefFoundError.
         *
         * Thus, we create a new class loader which combines 1 and 2 and
         * delegates to 3.
         *
         * Only done inside this if statement because otherwise we didn't get
         * "-libjars" and so loader 1 is missing, and we don't want to mess
         * with loader 3.
         */

        final URL[] urls1 = loader1.getURLs(), urls2 = loader2.getURLs(),
                allURLs = new URL[urls1.length + urls2.length];

        System.arraycopy(urls1, 0, allURLs, 0, urls1.length);
        System.arraycopy(urls2, 0, allURLs, urls1.length, urls2.length);

        thread.setContextClassLoader(new URLClassLoader(allURLs, loader2.getParent()));

        // Make sure Hadoop also uses the right class loader.
        conf.setClassLoader(thread.getContextClassLoader());
    }

    /* Call the go(args,conf) method of this class, but do it via
     * reflection, loading this class from the context class loader.
     *
     * This is because in Java, class identity is based on both class name
     * and class loader. Using the same loader identifiers as in the
     * previous comment, this class and the rest of Hadoop-BAM was
     * originally loaded by loader 2. Therefore if we were to call
     * go(args,conf) directly, plain old "CLIPlugin.class" would not be
     * compatible with any plugins that the new class loader finds,
     * because their CLIPlugin is a different CLIPlugin!
     *
     * Hence, jump into the new class loader. Both String[] and
     * Configuration are from loader 1 and thus we can safely pass those
     * from here (loader 2) to there (the new loader).
     */
    try {
        final Class<?> frontendClass = Class.forName(Frontend.class.getName(), true,
                thread.getContextClassLoader());

        final Method meth = frontendClass.getMethod("go", args.getClass(), conf.getClass());

        meth.invoke(null, args, conf);

    } catch (InvocationTargetException e) {
        // Presumably some RuntimeException thrown by go() for some reason.
        e.getCause().printStackTrace();
        System.exit(1);

    } catch (ClassNotFoundException e) {
        System.err.println("VERY STRANGE: could not reload Frontend class:");
        e.printStackTrace();

    } catch (NoSuchMethodException e) {
        System.err.println("VERY STRANGE: could not find our own method:");
        e.printStackTrace();

    } catch (IllegalAccessException e) {
        System.err.println("VERY STRANGE: not allowed to access our own method:");
        e.printStackTrace();
    }
    System.exit(112);
}

From source file:hudson.gridmaven.gridlayer.HadoopInstance.java

License:Open Source License

public HadoopInstance(Class c) {
    Configuration conf = new Configuration();
    conf.set("fs.default.name", "hdfs://localhost:9000/");
    conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
    conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");

    // Work around a bug with the Hadoop Configuration class loader, which
    // does not know about the current class loader and replaces it with the
    // current thread's context class loader, which cannot see the loaded Hadoop libraries.
    Class a = c;
    ClassLoader loader = a.getClassLoader();
    conf.setClassLoader(loader);

    this.conf = conf;
    // Retrieve fs if it can be done
    try {
        this.fs = FileSystem.get(conf);
    } catch (IOException ex) {
        ex.printStackTrace();
        Logger.getLogger(HadoopInstance.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:io.apigee.lembos.utils.RunnerUtils.java

License:Apache License

/**
 * Adds the "-libjars" entries, if any, to the {@link ClassLoader}.
 *
 * @param conf the Hadoop configuration
 *
 * @throws IOException if there is an issue creating the new ClassLoader
 */
public static void addLibJarsToClassLoader(final Configuration conf) throws IOException {
    final URL[] libJars = GenericOptionsParser.getLibJars(conf);

    if (libJars != null && libJars.length > 0) {
        final ClassLoader loader = new URLClassLoader(libJars, conf.getClassLoader());

        Thread.currentThread().setContextClassLoader(loader);

        conf.setClassLoader(loader);
    }
}

From source file:io.druid.storage.hdfs.HdfsStorageDruidModule.java

License:Apache License

@Override
public void configure(Binder binder) {
    MapBinder.newMapBinder(binder, String.class, SearchableVersionedDataFinder.class).addBinding(SCHEME)
            .to(HdfsFileTimestampVersionFinder.class).in(LazySingleton.class);

    Binders.dataSegmentPullerBinder(binder).addBinding(SCHEME).to(HdfsDataSegmentPuller.class)
            .in(LazySingleton.class);
    Binders.dataSegmentPusherBinder(binder).addBinding(SCHEME).to(HdfsDataSegmentPusher.class)
            .in(LazySingleton.class);
    Binders.dataSegmentKillerBinder(binder).addBinding(SCHEME).to(HdfsDataSegmentKiller.class)
            .in(LazySingleton.class);

    final Configuration conf = new Configuration();

    // Set explicit CL. Otherwise it'll try to use thread context CL, which may not have all of our dependencies.
    conf.setClassLoader(getClass().getClassLoader());

    // Ensure that FileSystem class level initialization happens with correct CL
    // See https://github.com/druid-io/druid/issues/1714
    ClassLoader currCtxCl = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        FileSystem.get(conf);
    } catch (IOException ex) {
        throw Throwables.propagate(ex);
    } finally {
        Thread.currentThread().setContextClassLoader(currCtxCl);
    }

    if (props != null) {
        for (String propName : System.getProperties().stringPropertyNames()) {
            if (propName.startsWith("hadoop.")) {
                conf.set(propName.substring("hadoop.".length()), System.getProperty(propName));
            }
        }
    }

    binder.bind(Configuration.class).toInstance(conf);
    JsonConfigProvider.bind(binder, "druid.storage", HdfsDataSegmentPusherConfig.class);

    Binders.taskLogsBinder(binder).addBinding("hdfs").to(HdfsTaskLogs.class);
    JsonConfigProvider.bind(binder, "druid.indexer.logs", HdfsTaskLogsConfig.class);
    binder.bind(HdfsTaskLogs.class).in(LazySingleton.class);
}

From source file:it.crs4.pydoop.mapreduce.pipes.CommandLineParser.java

License:Apache License

public int run(String[] args) throws Exception {
    CommandLineParser cli = new CommandLineParser();
    if (args.length == 0) {
        cli.printUsage();
        return 1;
    }
    try {
        Job job = new Job(new Configuration());
        job.setJobName(getClass().getName());
        Configuration conf = job.getConfiguration();
        CommandLine results = cli.parse(conf, args);
        if (results.hasOption("input")) {
            Path path = new Path(results.getOptionValue("input"));
            FileInputFormat.setInputPaths(job, path);
        }
        if (results.hasOption("output")) {
            Path path = new Path(results.getOptionValue("output"));
            FileOutputFormat.setOutputPath(job, path);
        }
        if (results.hasOption("jar")) {
            job.setJar(results.getOptionValue("jar"));
        }
        if (results.hasOption("inputformat")) {
            explicitInputFormat = true;
            setIsJavaRecordReader(conf, true);
            job.setInputFormatClass(getClass(results, "inputformat", conf, InputFormat.class));
        }
        if (results.hasOption("javareader")) {
            setIsJavaRecordReader(conf, true);
        }
        if (results.hasOption("map")) {
            setIsJavaMapper(conf, true);
            job.setMapperClass(getClass(results, "map", conf, Mapper.class));
        }
        if (results.hasOption("partitioner")) {
            job.setPartitionerClass(getClass(results, "partitioner", conf, Partitioner.class));
        }
        if (results.hasOption("reduce")) {
            setIsJavaReducer(conf, true);
            job.setReducerClass(getClass(results, "reduce", conf, Reducer.class));
        }
        if (results.hasOption("reduces")) {
            job.setNumReduceTasks(Integer.parseInt(results.getOptionValue("reduces")));
        }
        if (results.hasOption("writer")) {
            explicitOutputFormat = true;
            setIsJavaRecordWriter(conf, true);
            job.setOutputFormatClass(getClass(results, "writer", conf, OutputFormat.class));
        }
        if (results.hasOption("lazyOutput")) {
            if (Boolean.parseBoolean(results.getOptionValue("lazyOutput"))) {
                LazyOutputFormat.setOutputFormatClass(job, job.getOutputFormatClass());
            }
        }
        if (results.hasOption("avroInput")) {
            avroInput = AvroIO.valueOf(results.getOptionValue("avroInput").toUpperCase());
        }
        if (results.hasOption("avroOutput")) {
            avroOutput = AvroIO.valueOf(results.getOptionValue("avroOutput").toUpperCase());
        }

        if (results.hasOption("program")) {
            setExecutable(conf, results.getOptionValue("program"));
        }
        // if they gave us a jar file, include it into the class path
        String jarFile = job.getJar();
        if (jarFile != null) {
            final URL[] urls = new URL[] { FileSystem.getLocal(conf).pathToFile(new Path(jarFile)).toURL() };
            // FindBugs complains that creating a URLClassLoader should be
            // in a doPrivileged() block.
            ClassLoader loader = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
                public ClassLoader run() {
                    return new URLClassLoader(urls);
                }
            });
            conf.setClassLoader(loader);
        }
        setupPipesJob(job);
        return job.waitForCompletion(true) ? 0 : 1;
    } catch (ParseException pe) {
        LOG.info("Error : " + pe);
        cli.printUsage();
        return 1;
    }
}

From source file:ml.shifu.guagua.hadoop.io.GuaguaOptionsParser.java

License:Apache License

/**
 * Modify configuration according to user-specified generic options
 *
 * @param conf
 *            Configuration to be modified
 * @param line
 *            User-specified generic options
 */
private void processGeneralOptions(Configuration conf, CommandLine line) throws IOException {
    if (line.hasOption("fs")) {
        FileSystem.setDefaultUri(conf, line.getOptionValue("fs"));
    }

    if (line.hasOption("jt")) {
        conf.set("mapred.job.tracker", line.getOptionValue("jt"));
    }
    if (line.hasOption("conf")) {
        String[] values = line.getOptionValues("conf");
        for (String value : values) {
            conf.addResource(new Path(value));
        }
    }
    if (line.hasOption("libjars")) {
        conf.set("tmpjars", validateFiles(line.getOptionValue("libjars"), conf));
        // setting libjars in client classpath
        URL[] libjars = getLibJars(conf);
        if (libjars != null && libjars.length > 0) {
            conf.setClassLoader(new URLClassLoader(libjars, conf.getClassLoader()));
            Thread.currentThread().setContextClassLoader(
                    new URLClassLoader(libjars, Thread.currentThread().getContextClassLoader()));
        }
    }
    if (line.hasOption("files")) {
        conf.set("tmpfiles", validateFiles(line.getOptionValue("files"), conf));
    }
    if (line.hasOption("archives")) {
        conf.set("tmparchives", validateFiles(line.getOptionValue("archives"), conf));
    }
    if (line.hasOption('D')) {
        String[] property = line.getOptionValues('D');
        for (String prop : property) {
            String[] keyval = prop.split("=", 2);
            if (keyval.length == 2) {
                conf.set(keyval[0], keyval[1]);
            }
        }
    }
    conf.setBoolean("mapred.used.genericoptionsparser", true);

    // tokensFile
    if (line.hasOption("tokenCacheFile")) {
        String fileName = line.getOptionValue("tokenCacheFile");
        // check if the local file exists
        try {
            FileSystem localFs = FileSystem.getLocal(conf);
            Path p = new Path(fileName);
            if (!localFs.exists(p)) {
                throw new FileNotFoundException("File " + fileName + " does not exist.");
            }

            LOG.debug("setting conf tokensFile: {}", fileName);
            conf.set("mapreduce.job.credentials.json", localFs.makeQualified(p).toString());
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}

From source file:org.apache.druid.security.kerberos.DruidKerberosUtil.java

License:Apache License

public static void authenticateIfRequired(String internalClientPrincipal, String internalClientKeytab) {
    if (!Strings.isNullOrEmpty(internalClientPrincipal) && !Strings.isNullOrEmpty(internalClientKeytab)) {
        Configuration conf = new Configuration();
        conf.setClassLoader(DruidKerberosModule.class.getClassLoader());
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
        UserGroupInformation.setConfiguration(conf);
        try {
            //login for the first time.
            if (UserGroupInformation.getCurrentUser().hasKerberosCredentials() == false
                    || !UserGroupInformation.getCurrentUser().getUserName().equals(internalClientPrincipal)) {
                log.info("trying to authenticate user [%s] with keytab [%s]", internalClientPrincipal,
                        internalClientKeytab);
                UserGroupInformation.loginUserFromKeytab(internalClientPrincipal, internalClientKeytab);
                return;
            }
            //try to relogin in case the TGT expired
            if (UserGroupInformation.isLoginKeytabBased()) {
                log.info("Re-Login from key tab [%s] with principal [%s]", internalClientKeytab,
                        internalClientPrincipal);
                UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
                return;
            } else if (UserGroupInformation.isLoginTicketBased()) {
                log.info("Re-Login from Ticket cache");
                UserGroupInformation.getLoginUser().reloginFromTicketCache();
                return;
            }
        } catch (IOException e) {
            throw new ISE(e, "Failed to authenticate user principal [%s] with keytab [%s]",
                    internalClientPrincipal, internalClientKeytab);
        }
    }
}

From source file:org.apache.druid.storage.hdfs.HdfsStorageDruidModule.java

License:Apache License

@Override
public void configure(Binder binder) {
    MapBinder.newMapBinder(binder, String.class, SearchableVersionedDataFinder.class).addBinding(SCHEME)
            .to(HdfsFileTimestampVersionFinder.class).in(LazySingleton.class);

    Binders.dataSegmentPusherBinder(binder).addBinding(SCHEME).to(HdfsDataSegmentPusher.class)
            .in(LazySingleton.class);
    Binders.dataSegmentKillerBinder(binder).addBinding(SCHEME).to(HdfsDataSegmentKiller.class)
            .in(LazySingleton.class);

    final Configuration conf = new Configuration();

    // Set explicit CL. Otherwise it'll try to use thread context CL, which may not have all of our dependencies.
    conf.setClassLoader(getClass().getClassLoader());

    // Ensure that FileSystem class level initialization happens with correct CL
    // See https://github.com/apache/incubator-druid/issues/1714
    ClassLoader currCtxCl = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        FileSystem.get(conf);
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    } finally {
        Thread.currentThread().setContextClassLoader(currCtxCl);
    }

    if (props != null) {
        for (String propName : props.stringPropertyNames()) {
            if (propName.startsWith("hadoop.")) {
                conf.set(propName.substring("hadoop.".length()), props.getProperty(propName));
            }
        }
    }

    binder.bind(Configuration.class).toInstance(conf);
    JsonConfigProvider.bind(binder, "druid.storage", HdfsDataSegmentPusherConfig.class);

    Binders.taskLogsBinder(binder).addBinding("hdfs").to(HdfsTaskLogs.class);
    JsonConfigProvider.bind(binder, "druid.indexer.logs", HdfsTaskLogsConfig.class);
    binder.bind(HdfsTaskLogs.class).in(LazySingleton.class);
    JsonConfigProvider.bind(binder, "druid.hadoop.security.kerberos", HdfsKerberosConfig.class);
    binder.bind(HdfsStorageAuthentication.class).in(ManageLifecycle.class);
    LifecycleModule.register(binder, HdfsStorageAuthentication.class);

}

From source file:org.apache.hama.pipes.util.DistributedCacheUtil.java

License:Apache License

/**
 * Add the JARs from the given HDFS paths to the Classpath
 *
 * @param conf The job's configuration
 */
public static URL[] addJarsToJobClasspath(Configuration conf) {
    URL[] classLoaderURLs = ((URLClassLoader) conf.getClassLoader()).getURLs();
    String files = conf.get("tmpjars", "");

    if (!files.isEmpty()) {
        String[] fileArr = files.split(",");
        URL[] libjars = new URL[fileArr.length + classLoaderURLs.length];

        for (int i = 0; i < fileArr.length; i++) {
            String tmp = fileArr[i];

            URI pathURI;
            try {
                pathURI = new URI(tmp);
            } catch (URISyntaxException e) {
                throw new IllegalArgumentException(e);
            }

            try {
                FileSystem hdfs = FileSystem.get(conf);
                Path pathSrc = new Path(pathURI.getPath());
                // LOG.info("pathSrc: " + pathSrc);

                if (hdfs.exists(pathSrc)) {
                    LocalFileSystem local = LocalFileSystem.getLocal(conf);

                    // File dst = File.createTempFile(pathSrc.getName() + "-", ".jar");
                    Path pathDst = new Path(local.getWorkingDirectory(), pathSrc.getName());

                    LOG.debug("copyToLocalFile: " + pathDst);
                    hdfs.copyToLocalFile(pathSrc, pathDst);
                    local.deleteOnExit(pathDst);

                    libjars[i] = new URL(local.makeQualified(pathDst).toString());
                }

            } catch (IOException ex) {
                throw new RuntimeException("Error setting up classpath", ex);
            }
        }

        // Add old classLoader entries
        int index = fileArr.length;
        for (int i = 0; i < classLoaderURLs.length; i++) {
            libjars[index] = classLoaderURLs[i];
            index++;
        }

        // Set classloader in current conf/thread
        conf.setClassLoader(new URLClassLoader(libjars, conf.getClassLoader()));

        Thread.currentThread().setContextClassLoader(
                new URLClassLoader(libjars, Thread.currentThread().getContextClassLoader()));

        // URL[] urls = ((URLClassLoader) conf.getClassLoader()).getURLs();
        // for (URL u : urls)
        // LOG.info("newClassLoader: " + u.getPath());

        // Set tmpjars
        // hdfs to local path
        String jars = "";
        for (int i = 0; i < fileArr.length; i++) {
            URL url = libjars[i];
            if (jars.length() > 0) {
                jars += ",";
            }
            jars += url.toString();
        }
        conf.set("tmpjars", jars);

        return libjars;
    }
    return null;
}