Example usage for java.util.concurrent TimeUnit SECONDS

List of usage examples for java.util.concurrent TimeUnit SECONDS

Introduction

On this page you can find example usages of the java.util.concurrent TimeUnit.SECONDS constant, drawn from real source files.

Prototype

TimeUnit SECONDS

Document

Time unit representing one second.
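
Before the project examples below, here is a minimal, self-contained sketch (not taken from any of those files) showing the three most common uses of TimeUnit.SECONDS: converting to other units, sleeping, and bounding a wait.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

public class TimeUnitSecondsDemo {
    public static void main(String[] args) throws InterruptedException {
        // Conversions truncate: 90 seconds is 1 whole minute, 90000 ms
        System.out.println(TimeUnit.SECONDS.toMinutes(90)); // 1
        System.out.println(TimeUnit.SECONDS.toMillis(90)); // 90000

        // Sleep without hand-written millisecond arithmetic
        TimeUnit.SECONDS.sleep(1);

        // Bound a wait: await returns false if the latch is not released in time
        CountDownLatch latch = new CountDownLatch(1);
        System.out.println("released: " + latch.await(2, TimeUnit.SECONDS)); // false
    }
}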

Usage

From source file: com.ikanow.aleph2.analytics.spark.assets.SparkScalaInterpreterTopology.java

public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {

    final SetOnce<IBucketLogger> logger = new SetOnce<>();

    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        logger.set(context.getLogger(context.getBucket()));

        // Optional: make really really sure it exists after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        //INFO:
        System.out.println("Starting SparkScalaInterpreterTopology logging=" + logger.optional().isPresent());

        logger.optional().ifPresent(l -> {
            l.inefficientLog(Level.INFO,
                    ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology", "main",
                            "Starting SparkScalaInterpreterTopology.{0}",
                            Optionals.of(() -> context.getJob().get().name()).orElse("no_name")));
        });

        final SparkTopologyConfigBean job_config = BeanTemplateUtils
                .from(context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()),
                        SparkTopologyConfigBean.class)
                .get();
        final String scala_script = Optional.ofNullable(job_config.script()).orElse("");

        final String wrapper_script = IOUtils.toString(
                SparkScalaInterpreterTopology.class.getClassLoader().getResourceAsStream("ScriptRunner.scala"),
                "UTF-8");
        final String to_compile = wrapper_script.replace("USER_SCRIPT", scala_script);
        final SparkCompilerService scs = new SparkCompilerService();
        final Tuple2<ClassLoader, Object> o = scs.buildClass(to_compile, "ScriptRunner", logger.optional());

        Thread.currentThread().setContextClassLoader(o._1());

        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));

        SparkConf spark_context = new SparkConf().setAppName("SparkPassthroughTopology");

        final long streaming_batch_interval = (long) spark_context
                .getInt(SparkTopologyConfigBean.STREAMING_BATCH_INTERVAL, 10);

        // MAIN PROCESSING

        final Method m = o._2().getClass().getMethod("runScript", SparkScriptEngine.class);

        //DEBUG
        //final boolean test_mode = test_spec.isPresent(); // (serializable thing i can pass into the map)

        boolean is_streaming = context.getJob().map(j -> j.analytic_type())
                .map(t -> MasterEnrichmentType.streaming == t).orElse(false);
        final Either<JavaSparkContext, JavaStreamingContext> jsc = Lambdas.get(() -> {
            return is_streaming
                    ? Either.<JavaSparkContext, JavaStreamingContext>right(new JavaStreamingContext(
                            spark_context, Durations.seconds(streaming_batch_interval)))
                    : Either.<JavaSparkContext, JavaStreamingContext>left(new JavaSparkContext(spark_context));
        });
        try {
            final JavaSparkContext jsc_batch = jsc.either(l -> l, r -> r.sparkContext());

            final Multimap<String, JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> inputs = SparkTechnologyUtils
                    .buildBatchSparkInputs(context, test_spec, jsc_batch, Collections.emptySet());

            final Multimap<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>> streaming_inputs = jsc
                    .<Multimap<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>>>either(
                            l -> HashMultimap
                                    .<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>>create(),
                            r -> SparkTechnologyUtils.buildStreamingSparkInputs(context, test_spec, r,
                                    Collections.emptySet()));

            final SparkScriptEngine script_engine_bridge = new SparkScriptEngine(context, inputs,
                    streaming_inputs, test_spec, jsc_batch, jsc.either(l -> null, r -> r), job_config);

            // Add driver and generated JARs to path:
            jsc_batch.addJar(LiveInjector.findPathJar(o._2().getClass()));

            m.invoke(o._2(), script_engine_bridge);

            jsc.either(l -> {
                l.stop();
                return null;
            }, r -> {
                r.stop();
                return null;
            });

            logger.optional().ifPresent(l -> {
                l.inefficientLog(Level.INFO,
                        ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology", "main",
                                "Stopping SparkScalaInterpreterTopology.{0}",
                                Optionals.of(() -> context.getJob().get().name()).orElse("no_name")));
            });

            //INFO:
            System.out.println("Finished interpreter");
        } finally {
            jsc.either(l -> {
                l.close();
                return null;
            }, r -> {
                r.close();
                return null;
            });
        }
        logger.optional().ifPresent(Lambdas.wrap_consumer_u(l -> l.flush().get(10, TimeUnit.SECONDS)));
    } catch (Throwable t) {
        logger.optional().ifPresent(l -> {
            l.inefficientLog(Level.ERROR, ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology",
                    "main",
                    ErrorUtils.getLongForm("Error executing SparkScalaInterpreterTopology.unknown: {0}", t)));
        });

        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));
        logger.optional().ifPresent(Lambdas.wrap_consumer_u(l -> l.flush().get(10, TimeUnit.SECONDS)));
        System.exit(-1);
    }
}
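
In this topology, TimeUnit.SECONDS appears only on the shutdown paths: the logger's flush() future is read with get(10, TimeUnit.SECONDS) on both the success path and the error path, so a stuck logger cannot hang the driver for more than ten seconds at exit.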

From source file: Main.java

public static void sleep(int seconds) {
    try {
        TimeUnit.SECONDS.sleep(seconds);
    } catch (InterruptedException e) {
        throw new IllegalStateException(e);
    }
}

From source file: Main.java

public static void sleep(int seconds) {
    try {
        TimeUnit.SECONDS.sleep(seconds);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}
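
The two sleep helpers above differ only in how they handle InterruptedException: the first rethrows it as an unchecked IllegalStateException, while the second prints the stack trace and swallows the interrupt. A third option, sketched here under a hypothetical name, is to restore the thread's interrupt status so code further up the call stack can still observe the interruption:

public static void sleepQuietly(int seconds) {
    try {
        TimeUnit.SECONDS.sleep(seconds);
    } catch (InterruptedException e) {
        // sleep() clears the interrupt flag before throwing; re-assert it
        Thread.currentThread().interrupt();
    }
}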

From source file: Main.java

public static String getTimeSpanFromSeconds(long secs) {
    if (TimeUnit.SECONDS.toHours(secs) == 0)
        return String.format("%dmin", TimeUnit.SECONDS.toMinutes(secs));
    else
        return String.format("%dh %02dmin", TimeUnit.SECONDS.toHours(secs),
                TimeUnit.SECONDS.toMinutes(secs) - TimeUnit.HOURS.toMinutes(TimeUnit.SECONDS.toHours(secs)));
}
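
A quick check of the formatting, assuming the helper above is in scope (expected outputs shown in comments):

System.out.println(getTimeSpanFromSeconds(2700)); // 45min
System.out.println(getTimeSpanFromSeconds(3661)); // 1h 01min
System.out.println(getTimeSpanFromSeconds(7325)); // 2h 02min

Leftover seconds are truncated, so 59 seconds formats as "0min".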

From source file: Main.java

public static void sleep(int second, String threadName, String tag) {
    try {
        TimeUnit.SECONDS.sleep(second);
        System.out.println(threadName + " has sleep " + second + " seconds.\t tag : " + tag);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // restore the interrupt status instead of silently swallowing it
    }
}

From source file: Main.java

public static void stop(ExecutorService executor) {
    try {
        executor.shutdown();
        executor.awaitTermination(60, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}

From source file: Main.java

public static void stop(ExecutorService executor) {
    try {
        executor.shutdown();
        executor.awaitTermination(5, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        System.err.println("termination interrupted");
    } finally {
        if (!executor.isTerminated()) {
            System.err.println("killing non-finished tasks");
        }
        executor.shutdownNow();
    }
}

From source file: Main.java

public static void stop(ExecutorService executor) {
    try {
        executor.shutdown();
        executor.awaitTermination(60, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        System.err.println("termination interrupted");
    } finally {
        if (!executor.isTerminated()) {
            System.err.println("killing non-finished tasks");
        }
        executor.shutdownNow();
    }
}
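
The three stop variants above all call shutdown() and then wait (for 60, 5, and 60 seconds respectively), but none re-checks termination after shutdownNow() or restores the interrupt flag when the wait itself is interrupted. The two-phase pattern recommended in the ExecutorService Javadoc covers both; a sketch with a 60-second bound per phase:

public static void shutdownAndAwaitTermination(ExecutorService pool) {
    pool.shutdown(); // stop accepting new tasks
    try {
        // Phase 1: give running tasks a chance to finish
        if (!pool.awaitTermination(60, TimeUnit.SECONDS)) {
            pool.shutdownNow(); // Phase 2: cancel remaining tasks via interruption
            if (!pool.awaitTermination(60, TimeUnit.SECONDS))
                System.err.println("Pool did not terminate");
        }
    } catch (InterruptedException e) {
        pool.shutdownNow();
        Thread.currentThread().interrupt(); // preserve the caller's interrupt status
    }
}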

From source file: Main.java

public static void shutdownPoolAndAwaitTermination(ThreadPoolExecutor pool) {
    try {
        pool.shutdown();
        pool.awaitTermination(1000, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}
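
Despite its name, this variant never escalates to shutdownNow(): it simply waits up to 1000 seconds for an orderly shutdown and prints the stack trace if the wait is interrupted.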

From source file: Main.java

private static <T> T readCallableResult(Future<T> future) throws Exception {

    try {
        return future.get(60, TimeUnit.SECONDS);
    } catch (TimeoutException e) {
        return null;
    } finally {
        future.cancel(true);
    }

}
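
A possible call site, assuming it lives in the same class as the (private) helper; the executor and task names here are illustrative, and the caller must handle the checked Exception the helper declares:

ExecutorService executor = Executors.newSingleThreadExecutor();
Future<String> future = executor.submit(() -> {
    TimeUnit.SECONDS.sleep(2); // simulate slow work
    return "done";
});
String result = readCallableResult(future); // null if not finished within 60 seconds
executor.shutdown();

Note that the finally block cancels the future unconditionally: after a normal get() the cancel is a no-op, but on timeout cancel(true) interrupts the still-running task.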