Java tutorial: Hello World on YARN with Apache Twill and a ResourceSpecification
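The complete example below uses Apache Twill's twill-yarn module to run a simple distributed application on a YARN cluster. A HelloWorldRunnable sleeps for ten minutes and then logs a message; main() creates a YarnTwillRunnerService backed by ZooKeeper at localhost:2181, prepares three instances of the runnable with 1 virtual core and 2 GB of memory each, attaches a PrinterLogHandler so container logs appear on stdout, registers a shutdown hook that terminates the application, and waits for termination. A custom ClassAcceptor (HadoopClassExcluder) keeps Hadoop classes out of the application bundle, since those are expected to come from the cluster's own YARN application classpath.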
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.sina.dip.twill;

import java.io.PrintWriter;
import java.net.URL;
import java.util.List;
import java.util.concurrent.ExecutionException;

import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.twill.api.AbstractTwillRunnable;
import org.apache.twill.api.ClassAcceptor;
import org.apache.twill.api.ResourceSpecification;
import org.apache.twill.api.ResourceSpecification.SizeUnit;
import org.apache.twill.api.TwillController;
import org.apache.twill.api.TwillRunnerService;
import org.apache.twill.api.logging.PrinterLogHandler;
import org.apache.twill.yarn.YarnTwillRunnerService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.Futures;

/**
 * Hello World example using twill-yarn to run a TwillApplication over YARN.
 */
public class HelloWorldResourcesSpecification {

    public static final Logger LOG = LoggerFactory.getLogger(HelloWorldResourcesSpecification.class);

    /**
     * Hello World runnable that is provided to TwillRunnerService to be run.
     */
    private static class HelloWorldRunnable extends AbstractTwillRunnable {

        @Override
        public void run() {
            try {
                // Keep each container alive for ten minutes so the application stays visible in YARN.
                Thread.sleep(10 * 60 * 1000);
            } catch (Exception e) {
            }

            LOG.info("Hello World. My first distributed application.");
        }

        @Override
        public void stop() {
        }
    }

    public static void main(String[] args) {
        // ZooKeeper connection string used by Twill for runtime coordination.
        String zkStr = "localhost:2181";

        YarnConfiguration yarnConfiguration = new YarnConfiguration();

        final TwillRunnerService twillRunner = new YarnTwillRunnerService(yarnConfiguration, zkStr);

        twillRunner.start();

        // Use the cluster's YARN application classpath (with a fallback) for the launched containers.
        String yarnClasspath = yarnConfiguration.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
                "/usr/lib/hadoop/*,/usr/lib/hadoop-0.20-mapreduce/*,/usr/lib/hadoop-hdfs/*,"
                        + "/usr/lib/hadoop-mapreduce/*,/usr/lib/hadoop-yarn/*");

        List<String> applicationClassPaths = Lists.newArrayList();

        Iterables.addAll(applicationClassPaths, Splitter.on(",").split(yarnClasspath));

        // Ask YARN for 3 containers, each with 1 virtual core and 2 GB of memory.
        final TwillController controller = twillRunner
                .prepare(new HelloWorldRunnable(),
                        ResourceSpecification.Builder.with()
                                .setVirtualCores(1)
                                .setMemory(2, SizeUnit.GIGA)
                                .setInstances(3)
                                .build())
                .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out, true)))
                .withApplicationClassPaths(applicationClassPaths)
                .withBundlerClassAcceptor(new HadoopClassExcluder())
                .start();

        // Terminate the application and stop the runner when this JVM shuts down.
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                try {
                    Futures.getUnchecked(controller.terminate());
                } finally {
                    twillRunner.stop();
                }
            }
        });

        try {
            controller.awaitTerminated();
        } catch (ExecutionException e) {
            e.printStackTrace();
        }
    }

    static class HadoopClassExcluder extends ClassAcceptor {
        @Override
        public boolean accept(String className, URL classUrl, URL classPathUrl) {
            // Exclude the hadoop packages, but keep the hbase package.
            return !(className.startsWith("org.apache.hadoop")
                    && !className.startsWith("org.apache.hadoop.hbase"));
        }
    }
}
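The piece that gives this example its name is the ResourceSpecification passed to prepare(). As a minimal sketch using the same builder calls as the listing above (the values and the class name ResourceSpecificationSketch are illustrative, not part of the example), it can be read like this:

import org.apache.twill.api.ResourceSpecification;
import org.apache.twill.api.ResourceSpecification.SizeUnit;

public class ResourceSpecificationSketch {
    public static void main(String[] args) {
        // Illustrative values: each container gets 2 virtual cores and 512 MB
        // of memory, and a single container instance is requested.
        ResourceSpecification spec = ResourceSpecification.Builder.with()
                .setVirtualCores(2)             // CPU cores per container
                .setMemory(512, SizeUnit.MEGA)  // memory per container
                .setInstances(1)                // number of container instances
                .build();

        System.out.println("Built resource specification: " + spec);
    }
}

To run the full listing you need a working YARN cluster, a ZooKeeper ensemble reachable at the address in zkStr, and the twill-yarn artifact together with the Guava and SLF4J classes used here on the client classpath. The Hadoop classes themselves are deliberately left out of the application bundle by HadoopClassExcluder, because the containers pick them up from the YARN application classpath configured above.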