Example usage for java.util.concurrent TimeUnit DAYS

List of usage examples for java.util.concurrent TimeUnit DAYS

Introduction

On this page you can find example usage of java.util.concurrent TimeUnit DAYS.

Prototype

TimeUnit DAYS

Document

Time unit representing twenty four hours.
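
As a quick orientation before the project examples, here is a minimal, self-contained sketch (not taken from any of the sources below; the class name is purely illustrative) of the TimeUnit.DAYS operations used throughout this page: converting a day count into other units, and converting a millisecond span back into whole days.

import java.util.concurrent.TimeUnit;

public class TimeUnitDaysSketch {

    public static void main(String[] args) {
        // Convert a count of days into smaller units.
        long hoursInThreeDays = TimeUnit.DAYS.toHours(3);   // 72
        long millisInOneDay = TimeUnit.DAYS.toMillis(1);    // 86,400,000

        // Convert the other way: whole days contained in a millisecond span (truncates).
        long elapsedMillis = millisInOneDay * 2 + 5_000;
        long wholeDays = TimeUnit.DAYS.convert(elapsedMillis, TimeUnit.MILLISECONDS); // 2

        System.out.println(hoursInThreeDays + " hours, " + millisInOneDay + " ms, " + wholeDays + " days");
    }
}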

Usage

From source file:org.apache.marmotta.kiwi.infinispan.embedded.InfinispanEmbeddedCacheManager.java
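The method below builds Marmotta's namespace URI cache: the clustered configuration gives entries a one-day lifespan via expiration().lifespan(1, TimeUnit.DAYS), while the non-clustered configuration uses one hour.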

/**
 * Return the URI -> namespace cache from the cache manager. Used for looking up namespaces
 * @return the namespace URI cache
 */
public Map getNamespaceUriCache() {
    if (nsUriCache == null) {
        if (isClustered()) {
            Configuration nsuriConfiguration = new ConfigurationBuilder().read(defaultConfiguration)
                    .clustering().cacheMode(CacheMode.REPL_ASYNC).eviction()
                    .maxEntries(config.getNamespaceCacheSize()).expiration().lifespan(1, TimeUnit.DAYS).build();
            cacheManager.defineConfiguration(NS_URI_CACHE, nsuriConfiguration);
        } else {
            Configuration nsuriConfiguration = new ConfigurationBuilder().read(defaultConfiguration).eviction()
                    .maxEntries(config.getNamespaceCacheSize()).expiration().lifespan(1, TimeUnit.HOURS)
                    .build();
            cacheManager.defineConfiguration(NS_URI_CACHE, nsuriConfiguration);
        }

        nsUriCache = new AsyncMap(cacheManager.getCache(NS_URI_CACHE).getAdvancedCache()
                .withFlags(Flag.SKIP_LOCKING, Flag.SKIP_CACHE_LOAD, Flag.SKIP_REMOTE_LOOKUP));
    }
    return nsUriCache;
}

From source file:uk.codingbadgers.SurvivalPlus.SurvivalPlus.java
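This helper formats a millisecond duration as text, using TimeUnit.MILLISECONDS.toDays together with TimeUnit.DAYS.toMillis (and the HOURS/MINUTES equivalents) to peel off each component before building the string.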

/**
 * @param time the duration to format, in milliseconds
 * @return a human-readable string such as "1 Day, 2 Hours, 3 Minutes"
 */
public static String formatTime(Long time) {

    Long days = TimeUnit.MILLISECONDS.toDays(time);
    time = time - TimeUnit.DAYS.toMillis(days);

    Long hours = TimeUnit.MILLISECONDS.toHours(time);
    time = time - TimeUnit.HOURS.toMillis(hours);

    Long minutes = TimeUnit.MILLISECONDS.toMinutes(time);
    time = time - TimeUnit.MINUTES.toMillis(minutes);

    Long seconds = TimeUnit.MILLISECONDS.toSeconds(time);

    String formattedTime = "";
    if (days != 0) {
        formattedTime += (days == 1 ? "1 Day" : days + " Days");
    }

    if (hours != 0) {
        if (days != 0) {
            formattedTime += ", ";
        }
        formattedTime += (hours == 1 ? "1 Hour" : hours + " Hours");
    }

    if (minutes != 0) {
        if (days != 0 || hours != 0) {
            formattedTime += ", ";
        }
        formattedTime += (minutes == 1 ? "1 Minute" : minutes + " Minutes");
    }

    if (seconds != 0) {
        if (days != 0 || hours != 0 || minutes != 0) {
            formattedTime += ", ";
        }
        formattedTime += (seconds == 1 ? "1 Second" : seconds + " Seconds");
    }

    return formattedTime;
}

From source file:com.pro.gen.android.MainActivity.java
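This method builds a Google Fit DataReadRequest that aggregates step counts between the given start time and now, bucketing the results per day with bucketByTime(1, TimeUnit.DAYS).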

public DataReadRequest queryFitnessDataSince(long time) {
    // [START build_read_data_request]
    // Use the supplied time as the start of the range and the current moment as the end.
    Calendar cal = Calendar.getInstance();
    Date before = new Date(time);
    cal.setTime(before);
    long startTime = cal.getTimeInMillis();

    long endTime = new Date().getTime();

    java.text.DateFormat dateFormat = getDateInstance();
    Log.i(TAG, "Range Start: " + dateFormat.format(startTime));
    Log.i(TAG, "Range End: " + dateFormat.format(endTime));

    DataReadRequest readRequest = new DataReadRequest.Builder()
            // The data request can specify multiple data types to return, effectively
            // combining multiple data queries into one call.
            // In this example, it's very unlikely that the request is for several hundred
            // datapoints each consisting of a few steps and a timestamp.  The more likely
            // scenario is wanting to see how many steps were walked per day, for 7 days.
            .aggregate(DataType.TYPE_STEP_COUNT_DELTA, DataType.AGGREGATE_STEP_COUNT_DELTA)
            // Analogous to a "Group By" in SQL, defines how data should be aggregated.
            // bucketByTime allows for a time span, whereas bucketBySession would allow
            // bucketing by "sessions", which would need to be defined in code.
            .bucketByTime(1, TimeUnit.DAYS).setTimeRange(startTime, endTime, TimeUnit.MILLISECONDS).build();
    // [END build_read_data_request]

    return readRequest;
}

From source file:org.wso2.siddhi.extension.time.DateDifferenceFunctionExtension.java
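This Siddhi extension computes the difference between two dates and converts the resulting millisecond gap to whole days with TimeUnit.DAYS.convert.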

@Override
protected Object execute(Object[] data) {

    String firstDate = null;
    String secondDate;
    FastDateFormat userSpecifiedFirstFormat;
    FastDateFormat userSpecifiedSecondFormat;

    if (data.length == 4 || useDefaultDateFormat) {
        try {
            if (data[0] == null) {
                throw new ExecutionPlanRuntimeException("Invalid input given to time:dateDiff(dateValue1,"
                        + "dateValue2,dateFormat1,dateFormat2) function" + ". First "
                        + "argument cannot be null");
            }
            if (data[1] == null) {
                throw new ExecutionPlanRuntimeException("Invalid input given to time:dateDiff(dateValue1,"
                        + "dateValue2,dateFormat1,dateFormat2) function" + ". Second "
                        + "argument cannot be null");
            }

            if (!useDefaultDateFormat) {
                if (data[2] == null) {
                    throw new ExecutionPlanRuntimeException("Invalid input given to time:dateDiff(dateValue1,"
                            + "dateValue2,dateFormat1,dateFormat2) function" + ". Third "
                            + "argument cannot be null");
                }
                if (data[3] == null) {
                    throw new ExecutionPlanRuntimeException("Invalid input given to time:dateDiff(dateValue1,"
                            + "dateValue2,dateFormat1,dateFormat2) function" + ". Fourth "
                            + "argument cannot be null");
                }
                firstDateFormat = (String) data[2];
                secondDateFormat = (String) data[3];
            } else {
                if (data.length != 2) {
                    firstDateFormat = (String) data[2];
                }
            }
            firstDate = (String) data[0];
            secondDate = (String) data[1];
            userSpecifiedFirstFormat = FastDateFormat.getInstance(firstDateFormat);
            userSpecifiedSecondFormat = FastDateFormat.getInstance(secondDateFormat);
            Date userSpecifiedFirstDate = userSpecifiedFirstFormat.parse(firstDate);
            firstCalInstance.setTime(userSpecifiedFirstDate);
        } catch (ParseException e) {
            String errorMsg = "Provided format " + firstDateFormat + " does not match with the timestamp "
                    + firstDate + e.getMessage();
            throw new ExecutionPlanRuntimeException(errorMsg, e);
        } catch (ClassCastException e) {
            String errorMsg = "Provided Data type cannot be cast to desired format. " + e.getMessage();
            throw new ExecutionPlanRuntimeException(errorMsg, e);
        }

        try {
            Date userSpecifiedSecondDate = userSpecifiedSecondFormat.parse(secondDate);
            secondCalInstance.setTime(userSpecifiedSecondDate);
        } catch (ParseException e) {
            String errorMsg = "Provided format " + secondDateFormat + " does not match with the timestamp "
                    + secondDate + e.getMessage();
            throw new ExecutionPlanRuntimeException(errorMsg, e);
        }

    } else if (data.length == 2) {

        if (data[0] == null) {
            throw new ExecutionPlanRuntimeException("Invalid input given to time:dateDiff"
                    + "(timestampInMilliseconds1,timestampInMilliseconds2) function" + ". First "
                    + "argument cannot be null");
        }
        if (data[1] == null) {
            throw new ExecutionPlanRuntimeException("Invalid input given to time:dateDiff"
                    + "(timestampInMilliseconds1,timestampInMilliseconds2) function" + ". Second "
                    + "argument cannot be null");
        }

        try {
            long firstDateInMills = (Long) data[0];
            long secondDateInMills = (Long) data[1];
            firstCalInstance.setTimeInMillis(firstDateInMills);
            secondCalInstance.setTimeInMillis(secondDateInMills);
        } catch (ClassCastException e) {
            String errorMsg = "Provided Data type cannot be cast to desired format. " + e.getMessage();
            throw new ExecutionPlanRuntimeException(errorMsg, e);
        }
    } else {
        throw new ExecutionPlanRuntimeException("Invalid set of arguments given to time:dateDiff() function."
                + "Arguments should be either 2 or 4. ");
    }

    long dateDifference = firstCalInstance.getTimeInMillis() - secondCalInstance.getTimeInMillis();
    return TimeUnit.DAYS.convert(dateDifference, TimeUnit.MILLISECONDS);
}

From source file:com.jivesoftware.os.upena.service.UpenaStore.java
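This method creates the Amza change-log partition, using TimeUnit.DAYS.toMillis(30) and TimeUnit.DAYS.toMillis(10) to supply 30- and 10-day intervals, in milliseconds, to the PartitionProperties constructor.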

private EmbeddedClient changeLogClient() throws Exception {
    PartitionProperties partitionProperties = new PartitionProperties(Durability.fsync_async,
            TimeUnit.DAYS.toMillis(30), TimeUnit.DAYS.toMillis(10), TimeUnit.DAYS.toMillis(30),
            TimeUnit.DAYS.toMillis(10), TimeUnit.DAYS.toMillis(30), TimeUnit.DAYS.toMillis(10),
            TimeUnit.DAYS.toMillis(30), TimeUnit.DAYS.toMillis(10), false, Consistency.quorum, true, true,
            false, RowType.snappy_primary, "lab", -1, null, -1, -1);

    PartitionName partitionName = getPartitionName("change-log");
    while (true) {
        try {
            amzaService.getRingWriter().ensureMaximalRing(partitionName.getRingName(), 30_000L); //TODO config
            amzaService.createPartitionIfAbsent(partitionName, partitionProperties);
            amzaService.awaitOnline(partitionName, 30_000L); //TODO config
            return embeddedClientProvider.getClient(partitionName, CheckOnline.once);
        } catch (Exception x) {
            LOG.warn("Failed to get client for " + partitionName.getName() + ". Retrying...", x);
        }
    }
}

From source file:io.gromit.geolite2.GeoLocation.java
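This method schedules a daily refresh of the GeoLite2 database by passing TimeUnit.DAYS to scheduleAtFixedRate.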

/**
 * Start the scheduled daily database refresh.
 *
 * @return the scheduled database reader
 * @throws IllegalStateException the illegal state exception
 */
public GeoLocation start() throws IllegalStateException {
    if (scheduledExecutorService != null) {
        throw new IllegalStateException("it is already started");
    }
    readDatabase();
    scheduledExecutorService = Executors.newScheduledThreadPool(1);
    scheduledExecutorService.scheduleAtFixedRate(new Runnable() {
        @Override
        public void run() {
            readDatabase();
        }
    }, 1, 1, TimeUnit.DAYS);
    return this;
}

From source file:com.linkedin.pinot.queries.QueriesSentinelTest.java
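This test helper builds a Pinot segment whose time column, daysSinceEpoch, is declared in TimeUnit.DAYS.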

private void setupSegmentFor(String table) throws Exception {
    final String filePath = TestUtils
            .getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));

    if (INDEX_DIR.exists()) {
        FileUtils.deleteQuietly(INDEX_DIR);
    }
    INDEX_DIR.mkdir();

    final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
            new File(filePath), new File(INDEX_DIR, "segment"), "daysSinceEpoch", TimeUnit.DAYS, table);

    final SegmentIndexCreationDriver driver = new SegmentIndexCreationDriverImpl();

    driver.init(config);
    driver.build();

    System.out.println("built at : " + INDEX_DIR.getAbsolutePath());
}

From source file:org.apache.metron.dataloads.bulk.ElasticsearchDataPrunerTest.java
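This test shifts its starting timestamp back by several days with TimeUnit.DAYS.toMillis, generates hourly index names from that point, and asserts that only the expected indices are selected for pruning.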

@Test
public void testFilter() throws Exception {

    ObjectObjectHashMap<String, IndexMetaData> indexNames = new ObjectObjectHashMap<>();
    SimpleDateFormat dateChecker = new SimpleDateFormat("yyyyMMdd");
    int numDays = 5;
    String[] expectedIndices = new String[24];
    Date indexDate = new Date();

    indexDate.setTime(testDate.getTime() - TimeUnit.DAYS.toMillis(numDays));

    for (int i = 0, j = 0; i < numDays * 24; i++) {

        String indexName = "sensor_index_" + dateFormat.format(indexDate);
        //Delete 20160330
        if (dateChecker.format(indexDate).equals("20160330")) {
            expectedIndices[j++] = indexName;
        }

        indexNames.put(indexName, null);
        indexDate.setTime(indexDate.getTime() + TimeUnit.HOURS.toMillis(1));

    }

    ImmutableOpenMap<String, IndexMetaData> testIndices = ImmutableOpenMap.copyOf(indexNames);

    ElasticsearchDataPruner pruner = new ElasticsearchDataPruner(testDate, 1, configuration, indexClient,
            "sensor_index_");
    pruner.indexClient = indexClient;

    Iterable<String> filteredIndices = pruner.getFilteredIndices(testIndices);

    Object[] indexArray = IteratorUtils.toArray(filteredIndices.iterator());
    Arrays.sort(indexArray);
    Arrays.sort(expectedIndices);

    assertArrayEquals(expectedIndices, indexArray);

}

From source file:com.atypon.wayf.guice.WayfGuiceModule.java
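This Guice provider backs the publisher salt cache with a Guava cache whose entries expire one day after write via expireAfterWrite(1, TimeUnit.DAYS).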

@Provides
@Named("publisherSaltCache")
@Singleton
public Cache<Long, String> getPublisherSaltLoadingCache(
        @Named("publisherSaltRedisCache") LoadingCache<Long, String> publisherSaltRedisCache) {
    LoadingCacheGuavaImpl<Long, String> l1Cache = new LoadingCacheGuavaImpl<>();
    l1Cache.setGuavaCache(CacheBuilder.newBuilder().expireAfterWrite(1, TimeUnit.DAYS).build());
    l1Cache.setCacheLoader((key) -> publisherSaltRedisCache.get(key));

    return l1Cache;
}

From source file:org.opendaylight.controller.cluster.raft.RaftActorTest.java
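This test configures the Raft heartbeat interval as a one-day FiniteDuration so that heartbeats do not fire while the recovery messages are being verified.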

@Test
public void testRaftActorForwardsToRaftActorRecoverySupport() {
    String persistenceId = factory.generateActorId("leader-");

    DefaultConfigParamsImpl config = new DefaultConfigParamsImpl();

    config.setHeartBeatInterval(new FiniteDuration(1, TimeUnit.DAYS));

    TestActorRef<MockRaftActor> mockActorRef = factory.createTestActor(
            MockRaftActor.props(persistenceId, Collections.<String, String>emptyMap(), config), persistenceId);

    MockRaftActor mockRaftActor = mockActorRef.underlyingActor();

    // Wait for akka's recovery to complete so it doesn't interfere.
    mockRaftActor.waitForRecoveryComplete();

    RaftActorRecoverySupport mockSupport = mock(RaftActorRecoverySupport.class);
    mockRaftActor.setRaftActorRecoverySupport(mockSupport);

    Snapshot snapshot = Snapshot.create(new byte[] { 1 }, Collections.<ReplicatedLogEntry>emptyList(), 3, 1, 3,
            1);
    SnapshotOffer snapshotOffer = new SnapshotOffer(new SnapshotMetadata("test", 6, 12345), snapshot);
    mockRaftActor.handleRecover(snapshotOffer);

    MockRaftActorContext.MockReplicatedLogEntry logEntry = new MockRaftActorContext.MockReplicatedLogEntry(1, 1,
            new MockRaftActorContext.MockPayload("1", 5));
    mockRaftActor.handleRecover(logEntry);

    ApplyJournalEntries applyJournalEntries = new ApplyJournalEntries(2);
    mockRaftActor.handleRecover(applyJournalEntries);

    DeleteEntries deleteEntries = new DeleteEntries(1);
    mockRaftActor.handleRecover(deleteEntries);

    UpdateElectionTerm updateElectionTerm = new UpdateElectionTerm(5, "member2");
    mockRaftActor.handleRecover(updateElectionTerm);

    verify(mockSupport).handleRecoveryMessage(same(snapshotOffer), any(PersistentDataProvider.class));
    verify(mockSupport).handleRecoveryMessage(same(logEntry), any(PersistentDataProvider.class));
    verify(mockSupport).handleRecoveryMessage(same(applyJournalEntries), any(PersistentDataProvider.class));
    verify(mockSupport).handleRecoveryMessage(same(deleteEntries), any(PersistentDataProvider.class));
    verify(mockSupport).handleRecoveryMessage(same(updateElectionTerm), any(PersistentDataProvider.class));
}