Example usage for java.util.concurrent TimeUnit MINUTES

List of usage examples for java.util.concurrent TimeUnit MINUTES

Introduction

On this page you can find example usages of java.util.concurrent TimeUnit MINUTES.

Prototype

TimeUnit MINUTES

Document

Time unit representing sixty seconds.
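
For orientation, here is a minimal, self-contained sketch (not drawn from the projects below) of the conversions and blocking calls TimeUnit.MINUTES supports:

import java.util.concurrent.TimeUnit;

public class TimeUnitMinutesDemo {
    public static void main(String[] args) throws InterruptedException {
        long millis = TimeUnit.MINUTES.toMillis(5);    // 300000
        long seconds = TimeUnit.MINUTES.toSeconds(2);  // 120
        long minutes = TimeUnit.SECONDS.toMinutes(90); // 1 (conversions truncate)

        // blocking calls accept a duration in the unit directly
        TimeUnit.MINUTES.sleep(0); // returns immediately; a positive value would block

        System.out.println(millis + " " + seconds + " " + minutes);
    }
}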

Usage

From source file:org.oncoblocks.centromere.dataimport.cli.CommandLineRunner.java

/**
 * From http://stackoverflow.com/a/6710604/1458983
 * Converts a long-formatted timespan into a human-readable string that denotes the length of time 
 *   that has elapsed.
 * @param l Long representation of a diff between two time stamps.
 * @return String formatted time span.
 */
private static String formatInterval(final long l) {
    final long hr = TimeUnit.MILLISECONDS.toHours(l);
    final long min = TimeUnit.MILLISECONDS.toMinutes(l - TimeUnit.HOURS.toMillis(hr));
    final long sec = TimeUnit.MILLISECONDS
            .toSeconds(l - TimeUnit.HOURS.toMillis(hr) - TimeUnit.MINUTES.toMillis(min));
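    // note: the MILLISECONDS.toMillis(...) wrapper below is an identity
    // conversion; the subtraction alone already yields the ms remainder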
    final long ms = TimeUnit.MILLISECONDS.toMillis(
            l - TimeUnit.HOURS.toMillis(hr) - TimeUnit.MINUTES.toMillis(min) - TimeUnit.SECONDS.toMillis(sec));
    return String.format("%02d:%02d:%02d.%03d", hr, min, sec, ms);
}
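
Calling formatInterval(3725004L), for example, splits 3,725,004 ms into 1 hour, 2 minutes, 5 seconds and 4 milliseconds and returns "01:02:05.004". Hours are not capped at 24; the %02d format simply widens for longer spans.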

From source file:co.cask.cdap.internal.app.services.http.handlers.WorkflowStatsSLAHttpHandlerTest.java

@Test
public void testStatistics() throws Exception {

    deploy(WorkflowApp.class);
    String workflowName = "FunWorkflow";
    String mapreduceName = "ClassicWordCount";
    String sparkName = "SparkWorkflowTest";

    Id.Program workflowProgram = Id.Workflow.from(Id.Namespace.DEFAULT, "WorkflowApp", ProgramType.WORKFLOW,
            workflowName);
    Id.Program mapreduceProgram = Id.Program.from(Id.Namespace.DEFAULT, "WorkflowApp", ProgramType.MAPREDUCE,
            mapreduceName);
    Id.Program sparkProgram = Id.Program.from(Id.Namespace.DEFAULT, "WorkflowApp", ProgramType.SPARK,
            sparkName);

    long startTime = System.currentTimeMillis();
    long currentTimeMillis = startTime;
    String outlierRunId = null;
    for (int i = 0; i < 10; i++) {
        // workflow runs every 5 minutes
        currentTimeMillis = startTime + (i * TimeUnit.MINUTES.toMillis(5));
        RunId workflowRunId = RunIds.generate(currentTimeMillis);
        store.setStart(workflowProgram, workflowRunId.getId(), RunIds.getTime(workflowRunId, TimeUnit.SECONDS));

        // MR job starts 2 seconds after workflow started
        RunId mapreduceRunid = RunIds.generate(currentTimeMillis + TimeUnit.SECONDS.toMillis(2));
        Map<String, String> systemArgs = ImmutableMap.of(ProgramOptionConstants.WORKFLOW_NODE_ID, mapreduceName,
                ProgramOptionConstants.WORKFLOW_NAME, workflowName, ProgramOptionConstants.WORKFLOW_RUN_ID,
                workflowRunId.getId());

        store.setStart(mapreduceProgram, mapreduceRunid.getId(),
                RunIds.getTime(mapreduceRunid, TimeUnit.SECONDS), null, ImmutableMap.<String, String>of(),
                systemArgs);

        store.setStop(mapreduceProgram, mapreduceRunid.getId(),
                // map-reduce job ran for 17 seconds (started 2 seconds after the workflow, stops at +19)
                TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 19, ProgramRunStatus.COMPLETED);

        // This makes sure that not all runs have Spark programs in them
        if (i < 5) {
            // spark starts 20 seconds after workflow starts
            RunId sparkRunid = RunIds.generate(currentTimeMillis + TimeUnit.SECONDS.toMillis(20));
            systemArgs = ImmutableMap.of(ProgramOptionConstants.WORKFLOW_NODE_ID, sparkProgram.getId(),
                    ProgramOptionConstants.WORKFLOW_NAME, workflowName, ProgramOptionConstants.WORKFLOW_RUN_ID,
                    workflowRunId.getId());
            store.setStart(sparkProgram, sparkRunid.getId(), RunIds.getTime(sparkRunid, TimeUnit.SECONDS), null,
                    ImmutableMap.<String, String>of(), systemArgs);

            // spark job runs for 38 seconds
            long stopTime = TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 58;
            if (i == 4) {
                // spark job ran for 100 seconds. 62 seconds greater than avg.
                stopTime = TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 120;
            }
            store.setStop(sparkProgram, sparkRunid.getId(), stopTime, ProgramRunStatus.COMPLETED);
        }

        // workflow ran for 1 minute
        long workflowStopTime = TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 60;
        if (i == 4) {
            // spark job ran longer for this run
            workflowStopTime = TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 122;
            outlierRunId = workflowRunId.getId();
        }

        store.setStop(workflowProgram, workflowRunId.getId(), workflowStopTime, ProgramRunStatus.COMPLETED);
    }

    String request = String.format(
            "%s/namespaces/%s/apps/%s/workflows/%s/statistics?start=%s&end=%s" + "&percentile=%s",
            Constants.Gateway.API_VERSION_3, Id.Namespace.DEFAULT.getId(), WorkflowApp.class.getSimpleName(),
            workflowProgram.getId(), TimeUnit.MILLISECONDS.toSeconds(startTime),
            TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + TimeUnit.MINUTES.toSeconds(2), "99");

    HttpResponse response = doGet(request);
    WorkflowStatistics workflowStatistics = readResponse(response, new TypeToken<WorkflowStatistics>() {
    }.getType());
    PercentileInformation percentileInformation = workflowStatistics.getPercentileInformationList().get(0);
    Assert.assertEquals(1, percentileInformation.getRunIdsOverPercentile().size());
    Assert.assertEquals(outlierRunId, percentileInformation.getRunIdsOverPercentile().get(0));
    Assert.assertEquals("5", workflowStatistics.getNodes().get(sparkName).get("runs"));

    request = String.format(
            "%s/namespaces/%s/apps/%s/workflows/%s/statistics?start=%s&end=%s" + "&percentile=%s&percentile=%s",
            Constants.Gateway.API_VERSION_3, Id.Namespace.DEFAULT.getId(), WorkflowApp.class.getSimpleName(),
            workflowProgram.getId(), "now", "0", "90", "95");

    response = doGet(request);
    Assert.assertEquals(HttpResponseStatus.BAD_REQUEST.getCode(), response.getStatusLine().getStatusCode());

    request = String.format(
            "%s/namespaces/%s/apps/%s/workflows/%s/statistics?start=%s&end=%s" + "&percentile=%s&percentile=%s",
            Constants.Gateway.API_VERSION_3, Id.Namespace.DEFAULT.getId(), WorkflowApp.class.getSimpleName(),
            workflowProgram.getId(), "now", "0", "90.0", "950");

    response = doGet(request);
    Assert.assertEquals(HttpResponseStatus.BAD_REQUEST.getCode(), response.getStatusLine().getStatusCode());
}

From source file:me.hqm.plugindev.wget.WGET.java

public static void login(Player player) {
    WGUser alias = new WGUser();
    Db db = Db.open(DB_URL);

    WGUser user = db.from(alias).where(alias.minecraftId).is(player.getUniqueId().toString()).selectFirst();
    user.sessionExpires = new Timestamp(
            System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(DB_SESSION_MINS));
    user.lastKnownName = player.getName();

    db.update(user);
    db.close();

    player.performCommand("wget " + WGCommand.CACHE.get(player.getName()).toString());
}

From source file:com.groupon.odo.controllers.HomeController.java

@Bean
public EmbeddedServletContainerFactory servletContainer() {
    TomcatEmbeddedServletContainerFactory factory = new TomcatEmbeddedServletContainerFactory();

    int apiPort = Utils.getSystemPort(Constants.SYS_API_PORT);
    factory.setPort(apiPort);
    factory.setSessionTimeout(10, TimeUnit.MINUTES);
    factory.setContextPath("/testproxy");

    if (Utils.getEnvironmentOptionValue(Constants.SYS_LOGGING_DISABLED) != null) {
        HistoryService.getInstance().disableHistory();
    }
    return factory;
}

From source file:de.qucosa.servlet.MetsDisseminatorServlet.java

@Override
public void init() throws ServletException {
    super.init();

    startupProperties = new PropertyCollector().source(getServletContext()).source(System.getProperties())
            .collect();

    final FedoraClientFactory fedoraClientFactory = attemptToCreateFedoraClientFactoryFrom(startupProperties);

    if (fedoraClientFactory == null) {
        // we need a client factory for startup connections
        log.warn("Fedora connection credentials not configured. No connection pooling possible.");
    } else {
        final GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig();
        poolConfig.setMaxTotal(
                Integer.parseInt(startupProperties.getProperty(PROP_FEDORA_CONNECTIONPOOL_MAXSIZE, "20")));
        poolConfig.setMinIdle(5);
        poolConfig.setMinEvictableIdleTimeMillis(TimeUnit.MINUTES.toMillis(1));

        fedoraClientPool = new GenericObjectPool<>(fedoraClientFactory, poolConfig);

        log.info("Initialized Fedora connection pool.");
    }

    cacheManager = CacheManager.newInstance();
    cache = cacheManager.getCache("dscache");
}

From source file:rk.java.compute.cep.EventBusTest.java

@Test
public void processOnThreadPerCoreBasis() throws Exception {

    int NUMBER_OF_CORES = 8;
    ComputeService dispatcher = new ComputeService(numberOfMarketSourceInstances, NUMBER_OF_CORES,
            priceEventSink);
    dispatcher.subscribeToTickEventsFrom(eventBus);

    for (int i = 0; i < numberOfMarketSourceInstances; i++) {
        TradeFeed market = new TradeFeed(numberOfTicksPerProducer);
        market.setName("Market Maker " + i);
        market.setDaemon(true);
        market.publishTickEventsTo(eventBus);
        market.start();
    }

    StopWatch await = dispatcher.shutDownAndAwaitTermination(1, TimeUnit.MINUTES);
    System.out.println(await.prettyPrint());
    System.out.println(dispatcher);
    /*
     * Remember to account for poison pills when counting: each market
     * source contributes one poison pill in addition to its ticks.
     */
    assertEquals(numberOfMarketSourceInstances * numberOfTicksPerProducer + numberOfMarketSourceInstances,
            dispatcher.getTicksReceivedCount());
    assertEquals(numberOfMarketSourceInstances * numberOfTicksPerProducer + numberOfMarketSourceInstances,
            dispatcher.getTicksProcessedCount());
}

From source file:com.yougou.api.interceptor.impl.AuthInterceptor.java

/**
 * Determines whether the API with the given id is enabled.
 *
 * @param apiId the id of the API
 * @return true|false
 */
private boolean isApiEnable(String apiId) {
    if (StringUtils.isBlank(apiId))
        return false;

    List<Map<String, String>> list = null;
    try {
        list = (List<Map<String, String>>) redisTemplate.opsForValue().get(API_ENABLE_REDIS_KEY);
    } catch (Exception e) {
        e.printStackTrace();
    }
    if (CollectionUtils.isEmpty(list)) {
        // not cached; rebuild the list from the database
        Session session = null;
        list = new ArrayList<Map<String, String>>();
        try {
            // query the enabled flag of each API
            session = apiLicenseDao.getHibernateSession();
            SQLQuery query = (SQLQuery) session
                    .createSQLQuery("select t.id, t.is_enable from tbl_merchant_api t ")
                    .addScalar("id", Hibernate.STRING).addScalar("is_enable", Hibernate.STRING)
                    .setResultTransformer(Transformers.ALIAS_TO_ENTITY_MAP);
            List<?> objs = query.list();
            for (Object object : objs) {
                list.add((Map<String, String>) object);
            }
            redisTemplate.opsForValue().set(API_ENABLE_REDIS_KEY, list, 5, TimeUnit.MINUTES);
            businessLogger.log(
                    MessageFormat.format("?API_ENABLE_REDIS_KEY?{0}",
                            CollectionUtils.isNotEmpty(list) ? list.size() : 0));
        } finally {
            apiLicenseDao.releaseHibernateSession(session);
        }
    }

    return this.isEnableApi(apiId, list);
}

From source file:com.jaspersoft.android.jaspermobile.test.real.RealReportViewerPageTest.java

@Override
public void setUp() throws Exception {
    super.setUp();

    mMockedSpiceManager = SmartMockedSpiceManager.builder().setIdlingResourceTimeout(3, TimeUnit.MINUTES)
            .setDebugable(true).setResponseChain(InputControlsList.class, ReportExecutionResponse.class)
            .build();

    registerTestModule(new TestModule());
    setDefaultCurrentProfile();

    injector = WebViewInjector.registerFor(ReportHtmlViewerActivity_.class);
    setActivityIntent(ReportHtmlViewerActivity_.intent(getInstrumentation().getTargetContext())
            .resource(mResource).get());
    startActivityUnderTest();
}

From source file:it.smartcommunitylab.weliveplayer.managers.WeLivePlayerManager.java

/**
 * Apps Cache Generator.
 * 
 * @throws ExecutionException
 */
//   @PostConstruct
public void init() throws ExecutionException {
    // create a cache for List<Artifact> based on city.
    artifactsPSA = CacheBuilder.newBuilder().expireAfterWrite(10, TimeUnit.MINUTES)
            .build(new CacheLoader<String, List<Artifact>>() {
                @Override
                public List<Artifact> load(String city) throws Exception {
                    List<Artifact> artifacts = getArtifacts(city, PSA);
                    return artifacts;
                }
            });

    artifactsBB = CacheBuilder.newBuilder().expireAfterWrite(10, TimeUnit.MINUTES)
            .build(new CacheLoader<String, List<Artifact>>() {
                @Override
                public List<Artifact> load(String city) throws Exception {
                    List<Artifact> artifacts = getArtifacts(city, BB);
                    return artifacts;
                }
            });

    artifactsDataset = CacheBuilder.newBuilder().expireAfterWrite(10, TimeUnit.MINUTES)
            .build(new CacheLoader<String, List<Artifact>>() {
                @Override
                public List<Artifact> load(String city) throws Exception {
                    List<Artifact> artifacts = getArtifacts(city, DS);
                    return artifacts;
                }
            });

    appCommentsCache = CacheBuilder.newBuilder().expireAfterWrite(10, TimeUnit.MINUTES)
            .build(new CacheLoader<String, List<Comment>>() {

                @Override
                public List<Comment> load(String appId) throws Exception {
                    List<Comment> comments = getComments(appId);
                    return comments;
                }

            });

}
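
Once built, each LoadingCache populates itself on the first lookup for a city and then serves the cached list for up to ten minutes. A sketch of the consuming side, with a hypothetical city name for illustration:

List<Artifact> artifacts = artifactsPSA.get("Trento"); // invokes getArtifacts(city, PSA) on a miss; throws ExecutionException if the load fails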

From source file:br.unb.cic.bionimbuz.plugin.AbstractPlugin.java

@Override
public void start() {

    this.schedExecutorService.scheduleAtFixedRate(this, 0, 3, TimeUnit.MINUTES);
}
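
Here the plugin itself is the Runnable: scheduleAtFixedRate invokes its run() method immediately (initial delay 0) and then once every 3 minutes for as long as the executor lives.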