List of usage examples for org.apache.hadoop.fs.FileSystem.SHUTDOWN_HOOK_PRIORITY
public static final int SHUTDOWN_HOOK_PRIORITY

The priority of the FileSystem shutdown hook (value 10), which closes all cached FileSystem instances at JVM exit. Hadoop's ShutdownHookManager runs hooks in decreasing priority order, so any hook registered with a priority above this value runs while the filesystems are still open.
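Before the project examples, here is a minimal sketch of the common pattern, assuming only the stock Hadoop API (ShutdownHookManager and FileSystem). The class name and the work done inside the hook are hypothetical placeholders; the point is that registering one priority level above SHUTDOWN_HOOK_PRIORITY makes the hook run before Hadoop closes its cached filesystems.

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.util.ShutdownHookManager;

public class ShutdownHookSketch {
    public static void main(String[] args) {
        // Higher priority runs earlier: +1 makes this hook fire before the
        // FileSystem hook at SHUTDOWN_HOOK_PRIORITY closes the cached filesystems.
        ShutdownHookManager.get().addShutdownHook(
                () -> System.out.println("flushing state while HDFS is still open"),
                FileSystem.SHUTDOWN_HOOK_PRIORITY + 1);
    }
}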
From source file:co.cask.cdap.logging.framework.distributed.LogSaverTwillRunnable.java
License: Apache License

@Override
protected Injector doInit(TwillContext context) {
    name = context.getSpecification().getName();
    injector = createGuiceInjector(getCConfiguration(), getConfiguration(), context);

    // Register a shutdown hook to stop Log Saver before the Hadoop FileSystem shuts down.
    ShutdownHookManager.get().addShutdownHook(new Runnable() {
        @Override
        public void run() {
            LOG.info("Shutdown hook triggered.");
            stop();
        }
    }, FileSystem.SHUTDOWN_HOOK_PRIORITY + 1);

    return injector;
}
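Because the hook is registered at SHUTDOWN_HOOK_PRIORITY + 1, the Log Saver is stopped, and can flush any pending writes through HDFS, before the FileSystem hook closes its clients.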
From source file:co.cask.cdap.logging.run.LogSaverTwillRunnable.java
License: Apache License

@Override
public void run() {
    LOG.info("Starting runnable " + name);

    // Register a shutdown hook to stop Log Saver before the Hadoop FileSystem shuts down.
    ShutdownHookManager.get().addShutdownHook(new Runnable() {
        @Override
        public void run() {
            LOG.info("Shutdown hook triggered.");
            stop();
        }
    }, FileSystem.SHUTDOWN_HOOK_PRIORITY + 1);

    Futures.getUnchecked(Services.chainStart(zkClientService, kafkaClientService,
            metricsCollectionService, logSaverService, logSaverStatusService));
    LOG.info("Runnable started " + name);

    try {
        completion.get();
        LOG.info("Runnable stopped " + name);
    } catch (InterruptedException e) {
        LOG.error("Waiting on completion interrupted", e);
        Thread.currentThread().interrupt();
    } catch (ExecutionException e) {
        // Propagating the execution exception makes the TwillRunnable terminate
        // with an error, which the AM detects so that it can restart the runnable.
        LOG.error("Completed with exception. Exception gets propagated", e);
        throw Throwables.propagate(e);
    }
}
From source file:com.cloudera.oryx.lambda.HadoopUtils.java
License: Open Source License

/**
 * Adds a shutdown hook that tries to call {@link Closeable#close()} on the given argument
 * at JVM shutdown. This integrates with Hadoop's {@link ShutdownHookManager} in order to
 * better interact with Spark's usage of the same.
 *
 * @param closeable thing to close
 */
public static void closeAtShutdown(Closeable closeable) {
    if (SHUTDOWN_HOOK.addCloseable(closeable)) {
        try {
            // Spark uses SHUTDOWN_HOOK_PRIORITY + 30; this tries to execute earlier
            ShutdownHookManager.get().addShutdownHook(SHUTDOWN_HOOK,
                    FileSystem.SHUTDOWN_HOOK_PRIORITY + 40);
        } catch (IllegalStateException ise) {
            log.warn("Can't close {} at shutdown since shutdown is in progress", closeable);
        }
    }
}
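Two details worth noting here: the +40 priority outranks Spark's hook at SHUTDOWN_HOOK_PRIORITY + 30, so these closeables are closed before Spark tears down; and addShutdownHook throws IllegalStateException when a JVM shutdown is already in progress, which is why the call is wrapped in a try/catch instead of being allowed to fail.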