Example usage for java.lang Runnable run

List of usage examples for java.lang Runnable run

Introduction

On this page you can find example usages of the java.lang.Runnable run() method.

Prototype

public abstract void run();

Source Link

Document

When an object implementing interface Runnable is used to create a thread, starting the thread causes the object's run method to be called in that separately executing thread.

Usage

From source file:at.ac.tuwien.qse.sepm.gui.controller.impl.SlideshowOrganizerImpl.java

@Override
public void setPresentAction(Runnable callback) {
    // Register the supplied callback to fire whenever the present button is activated.
    LOGGER.debug("setting present action");
    presentButton.setOnAction(actionEvent -> callback.run());
}

From source file:net.audumla.scheduler.quartz.QuartzScheduledExecutorService.java

@Override
public void execute(Runnable command) {
    // Runs the command synchronously on the caller's thread.
    // NOTE(review): despite this class being an ExecutorService, no hand-off to
    // another thread happens here — presumably intentional (Quartz drives the
    // actual scheduling); confirm callers do not expect asynchronous execution.
    command.run();
}

From source file:dk.nsi.haiba.lprimporter.status.StatusReporter.java

/**
 * Reports importer and database status as a small HTML page, and lets the
 * caller toggle between manual and scheduled import mode via the
 * {@code manual} request parameter.
 *
 * @return HTML body with status text and manual/scheduled toggle links;
 *         HTTP 200 when both databases are alive and the import is not
 *         overdue, HTTP 500 otherwise.
 */
@RequestMapping(value = "/status")
public ResponseEntity<String> reportStatus() {

    String manual = request.getParameter("manual");
    if (manual == null || manual.trim().length() == 0) {
        // no value set, use default set in the import executor
        manual = "" + importExecutor.isManualOverride();
    } else {
        // manual flag is set on the request
        if (manual.equalsIgnoreCase("true")) {
            // flag is true, start the importer in a new thread
            importExecutor.setManualOverride(true);
            Runnable importer = new Runnable() {
                public void run() {
                    importExecutor.doProcess(true);
                }
            };
            // Fix: the original called importer.run(), which executed the import
            // synchronously on the request thread despite the comment above.
            new Thread(importer).start();
        } else {
            importExecutor.setManualOverride(false);
        }
    }

    HttpHeaders headers = new HttpHeaders();
    // Pessimistic default: only flipped to OK when every check passes.
    HttpStatus status = HttpStatus.INTERNAL_SERVER_ERROR;
    String body = "OK";

    try {
        if (!statusRepo.isHAIBADBAlive()) {
            body = "HAIBA Database is _NOT_ running correctly";
        } else if (!statusRepo.isLPRDBAlive()) {
            body = "LPR Database is _NOT_ running correctly";
        } else if (statusRepo.isOverdue()) {
            // last run information is applied to body later
            body = "Is overdue";
        } else {
            status = HttpStatus.OK;
        }
    } catch (Exception e) {
        body = e.getMessage();
    }

    body += "</br>";
    body = addLastRunInformation(body);

    body += "</br>------------------</br>";

    String url = request.getRequestURL().toString();
    body += "<a href=\"" + url + "?manual=true\">Manual start importer</a>";
    body += "</br>";
    body += "<a href=\"" + url + "?manual=false\">Scheduled start importer</a>";
    body += "</br>";
    if (manual.equalsIgnoreCase("true")) {
        body += "status: MANUAL";
    } else {
        // default
        body += "status: SCHEDULED - " + cron;
    }

    headers.setContentType(MediaType.TEXT_HTML);

    return new ResponseEntity<String>(body, headers, status);
}

From source file:com.metamx.druid.indexing.common.index.YeOldePlumberSchool.java

/**
 * Builds a single-sink {@link Plumber} that spills swappable in-memory data to
 * a local temp directory and, on {@code finishJob}, merges all spills into one
 * segment and pushes it through {@code dataSegmentPusher}.
 *
 * NOTE(review): relies on outer fields {@code interval}, {@code version},
 * {@code tmpSegmentDir} and {@code dataSegmentPusher} — assumed initialized
 * before this is called.
 */
@Override
public Plumber findPlumber(final Schema schema, final FireDepartmentMetrics metrics) {
    // There can be only one.
    final Sink theSink = new Sink(interval, schema, version);

    // Temporary directory to hold spilled segments.
    final File persistDir = new File(tmpSegmentDir, theSink.getSegment().getIdentifier());

    // Set of spilled segments. Will be merged at the end.
    final Set<File> spilled = Sets.newHashSet();

    return new Plumber() {
        @Override
        public void startJob() {
            // No job-level setup required.
        }

        @Override
        public Sink getSink(long timestamp) {
            // Only timestamps inside the sink's interval belong to this plumber.
            if (theSink.getInterval().contains(timestamp)) {
                return theSink;
            } else {
                return null;
            }
        }

        @Override
        public <T> QueryRunner<T> getQueryRunner(Query<T> query) {
            // Querying in-flight data is not supported by this plumber.
            throw new UnsupportedOperationException("Don't query me, bro.");
        }

        @Override
        public void persist(Runnable commitRunnable) {
            // Spill any swappable in-memory data first, then run the caller's commit hook.
            spillIfSwappable();
            commitRunnable.run();
        }

        @Override
        public void finishJob() {
            // The segment we will upload
            File fileToUpload = null;

            try {
                // User should have persisted everything by now.
                Preconditions.checkState(!theSink.swappable(),
                        "All data must be persisted before fininshing the job!");

                if (spilled.size() == 0) {
                    throw new IllegalStateException("Nothing indexed?");
                } else if (spilled.size() == 1) {
                    // Single spill: upload it directly, no merge needed.
                    fileToUpload = Iterables.getOnlyElement(spilled);
                } else {
                    // Multiple spills: load each and merge into a single index.
                    List<QueryableIndex> indexes = Lists.newArrayList();
                    for (final File oneSpill : spilled) {
                        indexes.add(IndexIO.loadIndex(oneSpill));
                    }

                    fileToUpload = new File(tmpSegmentDir, "merged");
                    IndexMerger.mergeQueryableIndex(indexes, schema.getAggregators(), fileToUpload);
                }

                // Map merged segment so we can extract dimensions
                final QueryableIndex mappedSegment = IndexIO.loadIndex(fileToUpload);

                final DataSegment segmentToUpload = theSink.getSegment()
                        .withDimensions(ImmutableList.copyOf(mappedSegment.getAvailableDimensions()))
                        .withBinaryVersion(IndexIO.getVersionFromDir(fileToUpload));

                dataSegmentPusher.push(fileToUpload, segmentToUpload);

                log.info("Uploaded segment[%s]", segmentToUpload.getIdentifier());

            } catch (Exception e) {
                log.warn(e, "Failed to merge and upload");
                throw Throwables.propagate(e);
            } finally {
                // Best-effort cleanup of the local segment file; failure is logged only.
                try {
                    if (fileToUpload != null) {
                        log.info("Deleting Index File[%s]", fileToUpload);
                        FileUtils.deleteDirectory(fileToUpload);
                    }
                } catch (IOException e) {
                    log.warn(e, "Error deleting directory[%s]", fileToUpload);
                }
            }
        }

        // Persists the sink's current in-memory hydrant to a numbered spill
        // directory and records it for the final merge.
        private void spillIfSwappable() {
            if (theSink.swappable()) {
                final FireHydrant indexToPersist = theSink.swap();
                final int rowsToPersist = indexToPersist.getIndex().size();
                final File dirToPersist = getSpillDir(indexToPersist.getCount());

                log.info("Spilling index[%d] with rows[%d] to: %s", indexToPersist.getCount(), rowsToPersist,
                        dirToPersist);

                try {

                    IndexMerger.persist(indexToPersist.getIndex(), dirToPersist);

                    // Release the in-memory segment now that it is on disk.
                    indexToPersist.swapSegment(null);

                    metrics.incrementRowOutputCount(rowsToPersist);

                    spilled.add(dirToPersist);

                } catch (Exception e) {
                    log.warn(e, "Failed to spill index[%d]", indexToPersist.getCount());
                    throw Throwables.propagate(e);
                }
            }
        }

        // Spill directory for the n-th hydrant, e.g. "spill0", "spill1", ...
        private File getSpillDir(final int n) {
            return new File(persistDir, String.format("spill%d", n));
        }
    };
}

From source file:net.orfjackal.retrolambda.test.LambdaTest.java

@Test
public void empty_lambda() {
    // A no-op lambda must be accepted as a Runnable and be invokable.
    Runnable noop = () -> {
    };

    noop.run();
}

From source file:com.github.lothar.security.acl.jpa.repository.CustomerRepositoryTest.java

// Temporarily uninstalls the customer ACL specification while running the
// given action, restoring it afterwards even if the action throws.
private void doWithoutCustomerSpec(Runnable runnable) {
    Specification<Customer> previous = customerStrategy.uninstall(jpaSpecFeature);
    try {
        runnable.run();
    } finally {
        // Always reinstate the original specification.
        customerStrategy.install(jpaSpecFeature, previous);
    }
}

From source file:SingleThreadRequestExecutor.java

public void execute(final Runnable command) {
    // Wrap the task so failures are surfaced: a Future silently swallows
    // exceptions, so without this a NullPointerException (for example) would
    // vanish without a trace. We print AND rethrow for maximum visibility.
    executor.execute(() -> {
        try {
            command.run();
        } catch (Throwable e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }
    });
}

From source file:SingleThreadRequestExecutor.java

@Override
public Future<?> submit(final Runnable task) {
    // Wrap the task so failures are surfaced: the returned Future silently
    // swallows exceptions, so without this a NullPointerException (for
    // example) would vanish without a trace. We print AND rethrow so the
    // Future still completes exceptionally.
    return executor.submit(() -> {
        try {
            task.run();
        } catch (Throwable e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }
    });
}

From source file:dk.nsi.haiba.minipasconverter.status.StatusReporter.java

/**
 * Reports importer and HAIBA-database status (plus current import progress)
 * as a small HTML page, and lets the caller toggle between manual and
 * scheduled import mode via the {@code manual} request parameter.
 *
 * @return HTML body with status text, progress, and manual/scheduled toggle
 *         links; HTTP 200 when the database is alive and the import is not
 *         overdue, HTTP 500 otherwise.
 */
@RequestMapping(value = "/status")
public ResponseEntity<String> reportStatus() {

    String manual = request.getParameter("manual");
    if (manual == null || manual.trim().length() == 0) {
        // no value set, use default set in the import executor
        manual = "" + minipasPreprocessor.isManualOverride();
    } else {
        // manual flag is set on the request
        if (manual.equalsIgnoreCase("true")) {
            // flag is true, start the importer in a new thread
            minipasPreprocessor.setManualOverride(true);
            Runnable importer = new Runnable() {
                public void run() {
                    minipasPreprocessor.doManualProcess();
                }
            };
            // Fix: the original called importer.run(), which executed the import
            // synchronously on the request thread despite the comment above.
            new Thread(importer).start();
        } else {
            minipasPreprocessor.setManualOverride(false);
        }
    }

    HttpHeaders headers = new HttpHeaders();
    // Pessimistic default: only flipped to OK when every check passes.
    HttpStatus status = HttpStatus.INTERNAL_SERVER_ERROR;
    String body = "OK";

    try {
        if (!statusRepo.isHAIBADBAlive()) {
            body = "HAIBA Database is _NOT_ running correctly";
        } else if (statusRepo.isOverdue()) {
            // last run information is applied to body later
            body = "Is overdue";
        } else {
            status = HttpStatus.OK;
        }
    } catch (Exception e) {
        body = e.getMessage();
    }

    body += "</br>";
    body = addLastRunInformation(body);

    body += "</br>------------------</br>";

    String importProgress = currentImportProgress.getStatus();
    body += importProgress;

    body += "</br>------------------</br>";

    String url = request.getRequestURL().toString();

    body += "<a href=\"" + url + "?manual=true\">Manual start importer</a>";
    body += "</br>";
    body += "<a href=\"" + url + "?manual=false\">Scheduled start importer</a>";
    body += "</br>";
    if (manual.equalsIgnoreCase("true")) {
        body += "status: MANUAL";
    } else {
        // default
        body += "status: SCHEDULED - " + cron;
    }

    headers.setContentType(MediaType.TEXT_HTML);

    return new ResponseEntity<String>(body, headers, status);
}

From source file:org.fineract.module.stellar.FineractDocker.java

/**
 * Repeatedly invokes {@code attempt} until it succeeds or the {@code howLong}
 * millisecond budget is exhausted, sleeping between attempts.
 *
 * @param attempt the action to retry; success means returning without throwing
 * @param howLong total retry budget in milliseconds
 * @throws Exception the last failure observed, or a generic Exception if the
 *         budget expired before any attempt recorded a failure
 */
private void retry(final Runnable attempt, final int howLong) throws Exception {
    // Idiom fix: System.currentTimeMillis() instead of the legacy
    // new Date().getTime() — identical value, no throwaway allocation.
    final long start = System.currentTimeMillis();
    Exception lastE = null;
    long timeLeft = howLong;
    while (timeLeft > 0) {
        try {
            attempt.run();
            return; // success — stop retrying
        } catch (final Exception e) {
            lastE = e; // remember the most recent failure for the final rethrow
        }
        final long now = System.currentTimeMillis();
        final long timePassed = now - start;
        timeLeft = howLong - timePassed;
        // Sleep at least 3s, otherwise no longer than the smaller of elapsed
        // time and remaining budget. NOTE(review): the 3s floor can overshoot
        // the remaining budget — presumably acceptable; confirm if tight
        // deadlines matter to callers.
        final long sleepTime = Math.max(3000, Math.min(timePassed, timeLeft));
        Thread.sleep(sleepTime);
    }

    if (lastE != null)
        throw lastE;

    throw new Exception("failed, and I don't know why");
}