Java tutorial: running a Twill application on YARN that loads classes from a localized archive
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.sina.dip.twill;

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;

import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.twill.api.AbstractTwillRunnable;
import org.apache.twill.api.ClassAcceptor;
import org.apache.twill.api.ResourceSpecification;
import org.apache.twill.api.ResourceSpecification.SizeUnit;
import org.apache.twill.api.TwillApplication;
import org.apache.twill.api.TwillController;
import org.apache.twill.api.TwillRunnerService;
import org.apache.twill.api.TwillSpecification;
import org.apache.twill.yarn.YarnTwillRunnerService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.Futures;

/**
 * Hello World example using twill-yarn to run a TwillApplication over YARN,
 * loading a class from a localized archive at runtime.
 */
public class HelloWorldClassDependent {

    public static final Logger LOG = LoggerFactory.getLogger(HelloWorldClassDependent.class);

    /**
     * Hello World runnable that is provided to TwillRunnerService to be run.
     */
    private static class HelloWorldRunnable extends AbstractTwillRunnable {

        @Override
        public void run() {
            try {
                // "jars" is the directory into which the localized archive
                // (added via withLocalFiles() below) is expanded in the container.
                File jars = new File("jars");
                if (jars.exists() && jars.isDirectory()) {
                    File[] files = jars.listFiles();
                    List<URL> urls = new ArrayList<>();
                    for (File file : files) {
                        urls.add(file.toURI().toURL());
                    }
                    // Load a class that is not on the application classpath but is
                    // shipped inside the localized archive.
                    URLClassLoader classLoader = new URLClassLoader(urls.toArray(new URL[0]));
                    classLoader.loadClass("com.sina.dip.twill.example.test.MyClass");
                    classLoader.close();
                    LOG.info("hello world load class success");
                }
            } catch (Exception e) {
                LOG.error("class load error: " + ExceptionUtils.getFullStackTrace(e));
            }
        }

        @Override
        public void stop() {
        }
    }

    private static class HelloWorldApplication implements TwillApplication {

        @Override
        public TwillSpecification configure() {
            return TwillSpecification.Builder.with()
                .setName("HelloWorldApplication")
                .withRunnable()
                .add("hello1", new HelloWorldRunnable(),
                     ResourceSpecification.Builder.with()
                         .setVirtualCores(1)
                         .setMemory(2, SizeUnit.GIGA)
                         .setInstances(1)
                         .build())
                // Ship /tmp/hello.tar.gz as an archive (last argument true);
                // it is expanded under the local name "jars" in each container.
                .withLocalFiles()
                .add("jars", new File("/tmp/hello.tar.gz"), true)
                .apply()
                .anyOrder()
                .build();
        }
    }

    public static void main(String[] args) {
        String zkStr = "localhost:2181";

        YarnConfiguration yarnConfiguration = new YarnConfiguration();

        final TwillRunnerService twillRunner = new YarnTwillRunnerService(yarnConfiguration, zkStr);
        twillRunner.start();

        String yarnClasspath = yarnConfiguration.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
            "/usr/lib/hadoop/*,/usr/lib/hadoop-0.20-mapreduce/*,/usr/lib/hadoop-hdfs/*,"
                + "/usr/lib/hadoop-mapreduce/*,/usr/lib/hadoop-yarn/*");

        List<String> applicationClassPaths = Lists.newArrayList();
        Iterables.addAll(applicationClassPaths, Splitter.on(",").split(yarnClasspath));

        final TwillController controller = twillRunner.prepare(new HelloWorldApplication())
            .withApplicationClassPaths(applicationClassPaths)
            .withBundlerClassAcceptor(new HadoopClassExcluder())
            .start();

        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                try {
                    Futures.getUnchecked(controller.terminate());
                } finally {
                    twillRunner.stop();
                }
            }
        });

        try {
            controller.awaitTerminated();
        } catch (ExecutionException e) {
            e.printStackTrace();
        }
    }

    static class HadoopClassExcluder extends ClassAcceptor {
        @Override
        public boolean accept(String className, URL classUrl, URL classPathUrl) {
            // Exclude hadoop classes from the application bundle, but keep the hbase package.
            return !(className.startsWith("org.apache.hadoop")
                && !className.startsWith("org.apache.hadoop.hbase"));
        }
    }
}
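The runnable loads com.sina.dip.twill.example.test.MyClass from the jars expanded out of /tmp/hello.tar.gz, but that class itself is not part of the listing above. Below is a minimal sketch of what such a class could look like; the package and class name are taken from the listing, while the body is an assumption for illustration. It only needs to be compiled into a jar and packed into /tmp/hello.tar.gz so that the container can expand it into the "jars" directory.

package com.sina.dip.twill.example.test;

/**
 * Minimal placeholder for the class loaded by HelloWorldRunnable.
 * Only the fully qualified name matters here: loadClass() just verifies
 * that the URLClassLoader can find and define the class from the
 * localized archive. The body is an illustrative assumption.
 */
public class MyClass {

    public MyClass() {
        // No behavior required for the example; the runnable never
        // instantiates this class, it only loads it.
    }
}

Keeping MyClass off the application classpath and shipping it only inside hello.tar.gz is what makes the URLClassLoader step meaningful: the loadClass() call succeeds only if the archive was localized and expanded in the container as expected.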