Example usage for org.apache.hadoop.conf Configuration setClassLoader

List of usage examples for org.apache.hadoop.conf Configuration setClassLoader

Introduction

On this page you can find example usage for org.apache.hadoop.conf Configuration setClassLoader.

Prototype

public void setClassLoader(ClassLoader classLoader) 

Document

Set the class loader that will be used to load the various objects.
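
Before the usage examples, here is a minimal, self-contained sketch of the common pattern that the examples below follow: build a classloader (often a URLClassLoader over extra jars or a configuration directory), install it on the Configuration, and usually also on the current thread, so that class and resource lookups resolve through it. The jar path in this sketch is a placeholder, not taken from any example on this page.

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;

import org.apache.hadoop.conf.Configuration;

public class SetClassLoaderSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // Build a classloader over an extra jar; the path is a placeholder.
        URL[] urls = { new File(args.length > 0 ? args[0] : "extra.jar").toURI().toURL() };
        ClassLoader loader = new URLClassLoader(urls, conf.getClassLoader());

        // Install it on the Configuration and, commonly, on the current thread, so
        // that conf.getClassByName(...) and framework code that consults the context
        // classloader both resolve classes from the extra jar.
        conf.setClassLoader(loader);
        Thread.currentThread().setContextClassLoader(loader);

        System.out.println("Configuration classloader: " + conf.getClassLoader());
    }
}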

Usage

From source file:com.asakusafw.vanilla.testkit.inprocess.VanillaCommandEmulator.java

License:Apache License

@Override
public void execute(TestDriverContext context, ConfigurationFactory configurations,
        TestExecutionPlan.Command command) throws IOException, InterruptedException {
    List<String> arguments = getLaunchArguments(context, command);
    try (ClassLoaderContext cl = new ClassLoaderContext(loader(context))) {
        Configuration hadoop = configurations.newInstance();
        hadoop.setClassLoader(cl.getClassLoader());
        int exit = VanillaLauncher.exec(hadoop, arguments.toArray(new String[arguments.size()]));
        if (exit != 0) {
            throw new IOException(
                    MessageFormat.format("Asakusa Vanilla returned non-zero exit status: {0}", exit));
        }
    } catch (LaunchConfigurationException e) {
        throw new IOException("Asakusa Vanilla configuration was failed", e);
    }
}
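
The ClassLoaderContext used above is an Asakusa helper, not a Hadoop class. As a rough, hypothetical equivalent (not the Asakusa implementation), such a guard would swap the thread's context classloader on construction and restore the previous one on close, so the classloader handed to the Configuration is only active for the duration of the launch:

// Hypothetical stand-in for a guard like Asakusa's ClassLoaderContext: swaps the
// thread's context classloader and restores the previous one when closed.
final class ClassLoaderGuard implements AutoCloseable {
    private final ClassLoader previous;
    private final ClassLoader active;

    ClassLoaderGuard(ClassLoader active) {
        this.active = active;
        this.previous = Thread.currentThread().getContextClassLoader();
        Thread.currentThread().setContextClassLoader(active);
    }

    ClassLoader getClassLoader() {
        return active;
    }

    @Override
    public void close() {
        Thread.currentThread().setContextClassLoader(previous);
    }
}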

From source file:com.cloudera.sqoop.shims.ShimLoader.java

License:Apache License

@SuppressWarnings("unchecked")
/**
 * Actually load the shim for the current Hadoop version.
 * @param matchExprs a list of regexes against which the current Hadoop
 * version is compared. The first one to hit defines which class/jar to
 * use.
 * @param classNames a list in the same order as matchExprs. This defines
 * what class name to load as the shim class if the Hadoop version matches
 * matchExprs[i].
 * @param jarPatterns a list in the same order as matchExprs. This defines
 * a pattern to select a jar file from which the shim classes should be
 * loaded.
 * @param xface the shim interface that the shim class must match.
 * @param conf an optional Configuration whose context classloader should
 * be updated to the current Thread's contextClassLoader after pushing a
 * new ClassLoader on the stack to load this shim jar.
 */
private static <T> T loadShim(List<String> matchExprs, List<String> classNames, List<String> jarPatterns,
        Class<T> xface, Configuration conf) {
    String version = VersionInfo.getVersion();

    LOG.debug("Loading shims for class : " + xface.getName());
    LOG.debug("Hadoop version: " + version);

    for (int i = 0; i < matchExprs.size(); i++) {
        LOG.debug("Checking: " + matchExprs.get(i));
        if (version.matches(matchExprs.get(i))) {
            String className = classNames.get(i);
            String jarPattern = jarPatterns.get(i);

            if (LOG.isDebugEnabled()) {
                LOG.debug("Version matched regular expression: " + matchExprs.get(i));
                LOG.debug("Trying to load class: " + className);
            }

            // Test to see if the class is already on the classpath.
            try {
                // If we can load the shim directly, we just do so. In this case,
                // there's no need to update the Configuration's classloader,
                // because we didn't modify the classloader stack.
                return getShimInstance(className, xface);
            } catch (Exception e) {
                // Not already present. We'll need to load a jar for this.
                // Ignore this exception.
            }

            try {
                LOG.debug("Searching for jar matching: " + jarPattern);
                loadMatchingShimJar(jarPattern, className);
                LOG.debug("Loading shim from jar");
                T shim = getShimInstance(className, xface);

                if (null != conf) {
                    // Set the context classloader for the base Configuration to
                    // the current one, so we can load more classes from the shim jar.
                    conf.setClassLoader(Thread.currentThread().getContextClassLoader());
                }

                return shim;
            } catch (Exception e) {
                throw new RuntimeException("Could not load shim in class " + className, e);
            }
        }
    }

    throw new RuntimeException("Could not find appropriate Hadoop shim for " + version);
}

From source file:com.esri.AbstractTool.java

protected Configuration createConfiguration(final String propertiesPath) throws IOException {
    final Configuration configuration = new Configuration();
    configuration.setClassLoader(ClassLoader.getSystemClassLoader());
    loadProperties(configuration, propertiesPath);
    return configuration;
}
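
The loadProperties helper called above is not part of this snippet. A plausible, purely hypothetical implementation would read a java.util.Properties file and copy every entry into the Configuration, for example:

// Hypothetical sketch of the unshown loadProperties helper: reads a .properties
// file from the given path and copies every entry into the Configuration.
// Requires java.io.FileInputStream, java.io.InputStream and java.util.Properties.
protected void loadProperties(final Configuration configuration, final String propertiesPath)
        throws IOException {
    final Properties properties = new Properties();
    try (InputStream in = new FileInputStream(propertiesPath)) {
        properties.load(in);
    }
    for (final String name : properties.stringPropertyNames()) {
        configuration.set(name, properties.getProperty(name));
    }
}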

From source file:com.marklogic.contentpump.ContentPump.java

License:Apache License

/**
 * Set class loader for current thread and for Configuration based on
 * Hadoop home.
 * 
 * @param hdConfDir Hadoop home directory
 * @param conf Hadoop configuration
 * @throws MalformedURLException
 */
private static void setClassLoader(File hdConfDir, Configuration conf) throws Exception {
    ClassLoader parent = conf.getClassLoader();
    URL url = hdConfDir.toURI().toURL();
    URL[] urls = new URL[1];
    urls[0] = url;
    ClassLoader classLoader = new URLClassLoader(urls, parent);
    Thread.currentThread().setContextClassLoader(classLoader);
    conf.setClassLoader(classLoader);
}
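
A hypothetical call site (not shown on this page) might resolve the configuration directory from the HADOOP_CONF_DIR environment variable before delegating to the helper above; because the directory URL is added to a URLClassLoader, the *.xml files inside it become visible as classpath resources to both the thread and the Configuration:

// Hypothetical caller: locate the Hadoop configuration directory and install a
// classloader that can see the configuration files in it as resources.
// Assumes a Configuration named conf is already in scope.
String confDirName = System.getenv("HADOOP_CONF_DIR");
if (confDirName != null) {
    File hdConfDir = new File(confDirName);
    if (hdConfDir.isDirectory()) {
        setClassLoader(hdConfDir, conf);
    }
}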

From source file:com.taobao.datax.plugins.common.DFSUtils.java

License:Open Source License

/**
 * Get {@link Configuration}.
 * 
 * @param dir
 *            directory path in hdfs
 * 
 * @param ugi
 *            hadoop ugi
 * 
 * @param conf
 *            hadoop-site.xml path
 * 
 * @return {@link Configuration}
 * 
 * @throws java.io.IOException
 */

public static Configuration getConf(String dir, String ugi, String conf) throws IOException {

    URI uri = null;
    Configuration cfg = null;
    String scheme = null;
    try {
        uri = new URI(dir);
        scheme = uri.getScheme();
        if (null == scheme) {
            throw new IOException("HDFS Path missing scheme, check path begin with hdfs://ip:port/ .");
        }

        cfg = confs.get(scheme);
    } catch (URISyntaxException e) {
        throw new IOException(e.getMessage(), e.getCause());
    }

    if (cfg == null) {
        cfg = new Configuration();

        cfg.setClassLoader(DFSUtils.class.getClassLoader());

        List<String> configs = new ArrayList<String>();
        if (!StringUtils.isBlank(conf) && new File(conf).exists()) {
            configs.add(conf);
        } else {
            /*
             * For Taobao internal use: e.g. if bazhen.csy starts a new DataX
             * job, DataX will use /home/bazhen.csy/config/hadoop-site.xml
             * as the configuration XML.
             */
            String confDir = System.getenv("HADOOP_CONF_DIR");

            if (null == confDir) {
                //for taobao internal use, it is ugly
                configs.add(System.getProperty("user.home") + "/config/hadoop-site.xml");
            } else {
                //run in hadoop-0.19
                if (new File(confDir + "/hadoop-site.xml").exists()) {
                    configs.add(confDir + "/hadoop-site.xml");
                } else {
                    configs.add(confDir + "/core-default.xml");
                    configs.add(confDir + "/core-site.xml");
                }
            }
        }

        for (String config : configs) {
            log.info(String.format("HdfsReader use %s for hadoop configuration .", config));
            cfg.addResource(new Path(config));
        }

        /* commented by bazhen.csy */
        // log.info("HdfsReader use default ugi " +
        // cfg.get(ParamsKey.HdfsReader.ugi));

        if (uri.getScheme() != null) {
            String fsname = String.format("%s://%s:%s", uri.getScheme(), uri.getHost(), uri.getPort());
            log.info("fs.default.name=" + fsname);
            cfg.set("fs.default.name", fsname);
        }
        if (ugi != null) {
            cfg.set("hadoop.job.ugi", ugi);

            /*
             * commented by bazhen.csy log.info("use specification ugi:" +
             * cfg.get(ParamsKey.HdfsReader.ugi));
             */
        }
        confs.put(scheme, cfg);
    }

    return cfg;
}

From source file:com.taobao.datax.plugins.common.DFSUtils.java

License:Open Source License

public static Configuration newConf() {
    Configuration conf = new Configuration();
    /*
     * It's weird: we need the jar loader as the configuration's class loader,
     * but it is unclear why Configuration defaults to the current thread's
     * context ClassLoader. If you know, please add a comment below.
     * 
     * private ClassLoader classLoader; {
     *     classLoader = Thread.currentThread().getContextClassLoader();
     *     if (classLoader == null) {
     *         classLoader = Configuration.class.getClassLoader();
     *     }
     * }
     */
    conf.setClassLoader(DFSUtils.class.getClassLoader());

    return conf;
}
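
A short, illustrative sketch (not DataX code) of a plausible reason the method above pins the classloader: in a plugin-style deployment the thread context classloader may belong to the hosting framework, so classes bundled only with the plugin would not be visible through Configuration's default loader.

// Minimal sketch: prefer the plugin's own classloader over the thread context
// classloader, which Configuration would otherwise fall back to.
Configuration conf = new Configuration();
ClassLoader pluginLoader = DFSUtils.class.getClassLoader();
ClassLoader contextLoader = Thread.currentThread().getContextClassLoader();
if (contextLoader != pluginLoader) {
    // Classes shipped only with the plugin (custom codecs, filesystems, ...)
    // resolve only through the plugin loader, so set it explicitly.
    conf.setClassLoader(pluginLoader);
}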

From source file:edu.ubc.mirrors.equinoxonhadoop.ToolFromEquinox.java

License:Open Source License

@Override
public void setConf(Configuration config) {
    Framework framework = createFramework(config);
    wrappedTool = loadFromFramework(framework, Tool.class);
    wrappedTool.setConf(config);

    ClassLoader osgiLoader = wrappedTool.getClass().getClassLoader();
    config.setClassLoader(osgiLoader);
    Thread.currentThread().setContextClassLoader(osgiLoader);
}

From source file:edu.uci.ics.pregelix.core.hadoop.config.ConfigurationFactory.java

License:Apache License

@Override
public Configuration createConfiguration() throws HyracksDataException {
    try {
        Configuration conf = new Configuration();
        SerDeUtils.deserialize(conf, data);
        conf.setClassLoader(this.getClass().getClassLoader());
        return conf;
    } catch (Exception e) {
        throw new HyracksDataException(e);
    }
}

From source file:edu.uci.ics.pregelix.runtime.converter.ReadConverterFactory.java

License:Apache License

@SuppressWarnings("rawtypes")
@Override
public IReadConverter getReadConverter(IHyracksTaskContext ctx, int partitionId) throws HyracksDataException {
    final Configuration conf = confFactory.createConfiguration();
    // Set context properly
    ContextFactory ctxFactory = new ContextFactory();
    TaskAttemptContext mapperContext = ctxFactory.createContext(conf, partitionId);
    mapperContext.getConfiguration().setClassLoader(ctx.getJobletContext().getClassLoader());
    conf.setClassLoader(ctx.getJobletContext().getClassLoader());
    IterationUtils.setJobContext(BspUtils.getJobId(conf), ctx, mapperContext);
    Vertex.taskContext = mapperContext;

    final Vertex vertex = BspUtils.createVertex(conf);
    vertex.setVertexContext(IterationUtils.getVertexContext(BspUtils.getJobId(conf), ctx));

    final VertexInputConverter inputConverter = BspUtils.createVertexInputConverter(conf);

    return new IReadConverter() {

        @Override
        public void open(ARecordType recordType) throws HyracksDataException {
            inputConverter.open(recordType);
        }

        @Override
        public void convert(ARecordVisitablePointable recordPointable, ArrayTupleBuilder outputTb)
                throws HyracksDataException {
            try {
                // Converts an input AsterixDB record into a vertex object.
                vertex.reset();
                inputConverter.convert(recordPointable, vertex);

                // Outputs a tuple of <vertexId, vertex>.
                outputTb.reset();
                WritableComparable vertexId = vertex.getVertexId();
                DataOutput dos = outputTb.getDataOutput();
                vertexId.write(dos);
                outputTb.addFieldEndOffset();
                vertex.write(dos);
                outputTb.addFieldEndOffset();
            } catch (Exception e) {
                throw new HyracksDataException(e);
            }
        }

        @Override
        public void close() throws HyracksDataException {
            inputConverter.close();
        }

    };
}

From source file:edu.uci.ics.pregelix.runtime.converter.WriteConverterFactory.java

License:Apache License

@SuppressWarnings("rawtypes")
@Override
public IWriteConverter getFieldWriteConverter(IHyracksTaskContext ctx, int partitionId)
        throws HyracksDataException {
    final Configuration conf = confFactory.createConfiguration();
    // Set context properly
    ContextFactory ctxFactory = new ContextFactory();
    TaskAttemptContext mapperContext = ctxFactory.createContext(conf, partitionId);
    mapperContext.getConfiguration().setClassLoader(ctx.getJobletContext().getClassLoader());
    conf.setClassLoader(ctx.getJobletContext().getClassLoader());
    IterationUtils.setJobContext(BspUtils.getJobId(conf), ctx, mapperContext);
    Vertex.taskContext = mapperContext;

    final Vertex vertex = BspUtils.createVertex(conf);
    vertex.setVertexContext(IterationUtils.getVertexContext(BspUtils.getJobId(conf), ctx));

    final VertexOutputConverter outputConverter = BspUtils.createVertexOutputConverter(conf);
    final ResetableByteArrayInputStream inputStream = new ResetableByteArrayInputStream();
    final DataInput dataInput = new DataInputStream(inputStream);
    final RecordBuilder recordBuilder = new RecordBuilder();

    return new IWriteConverter() {

        @Override
        public void open(ARecordType recordType) throws HyracksDataException {
            recordBuilder.reset(recordType);
            outputConverter.open(recordType);
        }

        @Override
        public void convert(byte[] data, int start, int len, ArrayTupleBuilder outputTb)
                throws HyracksDataException {
            try {
                inputStream.setByteArray(data, start);
                vertex.readFields(dataInput);
                recordBuilder.init();
                outputTb.reset();
                outputConverter.convert(vertex.getVertexId(), outputTb.getDataOutput());
                outputTb.addFieldEndOffset();
                outputConverter.convert(vertex, recordBuilder);
                // By default, the record type tag is stored in AsterixDB.
                recordBuilder.write(outputTb.getDataOutput(), true);
                outputTb.addFieldEndOffset();
            } catch (Exception e) {
                throw new HyracksDataException(e);
            }
        }

        @Override
        public void close() throws HyracksDataException {
            outputConverter.close();
        }
    };
}