// Java tutorial: Apache Twill service-discovery example (HelloWorldServiceDiscovery).
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.sina.dip.twill; import java.io.PrintWriter; import java.net.URL; import java.util.Iterator; import java.util.List; import java.util.concurrent.ExecutionException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.twill.api.AbstractTwillRunnable; import org.apache.twill.api.ClassAcceptor; import org.apache.twill.api.ResourceSpecification; import org.apache.twill.api.ResourceSpecification.SizeUnit; import org.apache.twill.api.TwillApplication; import org.apache.twill.api.TwillContext; import org.apache.twill.api.TwillController; import org.apache.twill.api.TwillRunnerService; import org.apache.twill.api.TwillSpecification; import org.apache.twill.api.logging.PrinterLogHandler; import org.apache.twill.discovery.Discoverable; import org.apache.twill.discovery.ServiceDiscovered; import org.apache.twill.yarn.YarnTwillRunnerService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Splitter; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.util.concurrent.Futures; /** * Hello World example using twill-yarn to run a TwillApplication over YARN. 
*/ public class HelloWorldServiceDiscovery { public static final Logger LOG = LoggerFactory.getLogger(HelloWorldServiceDiscovery.class); /** * Hello World runnable that is provided to TwillRunnerService to be run. */ private static class HelloWorldRunnable extends AbstractTwillRunnable { @Override public void initialize(TwillContext context) { context.announce("HelloWorldService", context.getInstanceId()); } @Override public void run() { try { Thread.sleep(3 * 60 * 1000); } catch (Exception e) { } } @Override public void stop() { } } /** * Hello World runnable2 that is provided to TwillRunnerService to be run. */ private static class HelloWorldRunnable2 extends AbstractTwillRunnable { @Override public void initialize(TwillContext context) { context.announce("HelloWorldService2", context.getInstanceId()); } @Override public void run() { try { Thread.sleep(3 * 60 * 1000); } catch (Exception e) { } } @Override public void stop() { } } private static class HelloWorldApplication implements TwillApplication { @Override public TwillSpecification configure() { return TwillSpecification.Builder.with().setName("HelloWorldApplication").withRunnable() .add("hello1", new HelloWorldRunnable(), ResourceSpecification.Builder .with().setVirtualCores(1).setMemory(2, SizeUnit.GIGA).setInstances(2).build()) .noLocalFiles() .add("hello2", new HelloWorldRunnable2(), ResourceSpecification.Builder.with() .setVirtualCores(1).setMemory(1, SizeUnit.GIGA).setInstances(3).build()) .noLocalFiles().anyOrder().build(); } } public static void main(String[] args) { String zkStr = "localhost:2181"; YarnConfiguration yarnConfiguration = new YarnConfiguration(); final TwillRunnerService twillRunner = new YarnTwillRunnerService(yarnConfiguration, zkStr); twillRunner.start(); String yarnClasspath = yarnConfiguration.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH, "/usr/lib/hadoop/*,/usr/lib/hadoop-0.20-mapreduce/*,/usr/lib/hadoop-hdfs/*,/usr/lib/hadoop-mapreduce/*,/usr/lib/hadoop-yarn/*"); 
List<String> applicationClassPaths = Lists.newArrayList(); Iterables.addAll(applicationClassPaths, Splitter.on(",").split(yarnClasspath)); final TwillController controller = twillRunner.prepare(new HelloWorldApplication()) .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out, true))) .withApplicationClassPaths(applicationClassPaths) .withBundlerClassAcceptor(new HadoopClassExcluder()).start(); ServiceDiscovered helloWorldService = controller.discoverService("HelloWorldService"); ServiceDiscovered helloWorldService2 = controller.discoverService("HelloWorldService2"); int count = 0; while (true) { boolean flag = true; Iterator<Discoverable> iterator = helloWorldService.iterator(); while (iterator.hasNext()) { Discoverable discoverable = iterator.next(); System.out.println(discoverable.getName() + " : " + discoverable.getSocketAddress()); flag = false; } iterator = helloWorldService2.iterator(); while (iterator.hasNext()) { Discoverable discoverable = iterator.next(); System.out.println(discoverable.getName() + " : " + discoverable.getSocketAddress()); flag = false; } try { Thread.sleep(5 * 1000); } catch (InterruptedException e) { } if (++count >= 36 && flag) { break; } } Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { try { Futures.getUnchecked(controller.terminate()); } finally { twillRunner.stop(); } } }); try { controller.awaitTerminated(); } catch (ExecutionException e) { e.printStackTrace(); } } static class HadoopClassExcluder extends ClassAcceptor { @Override public boolean accept(String className, URL classUrl, URL classPathUrl) { // exclude hadoop but not hbase package return !(className.startsWith("org.apache.hadoop") && !className.startsWith("org.apache.hadoop.hbase")); } } }