Example usage for java.util.Date.from(Instant)

List of usage examples for java.util.Date.from(Instant)

Introduction

On this page you can find example usages of the java.util.Date.from(Instant) method.

Prototype

public static Date from(Instant instant) 

Source Link

Document

Obtains an instance of Date from an Instant object.

Usage

From source file:com.ikanow.aleph2.data_import_manager.analytics.utils.TestAnalyticTriggerCrudUtils.java

/**
 * Integration test of the analytic-trigger lifecycle against the trigger
 * CRUD store ({@code _test_crud}): generate + persist trigger state beans,
 * fetch the due triggers, push their next-check times forward, activate the
 * bucket/jobs, and verify what {@code getTriggersToCheck} returns at each
 * stage. Statement order matters — each phase mutates DB state that the
 * next phase asserts against.
 */
@Test
public void test_getTriggersToCheck() throws InterruptedException {
    // Precondition: the store starts empty.
    assertEquals(0, _test_crud.countObjects().join().intValue());

    final DataBucketBean bucket = buildBucket("/test/check/triggers", true);

    // Just set the test up:
    { // Phase 1: generate the trigger state beans for the bucket and persist them.
        final Stream<AnalyticTriggerStateBean> test_stream = AnalyticTriggerBeanUtils
                .generateTriggerStateStream(bucket, false, Optional.empty());
        final List<AnalyticTriggerStateBean> test_list = test_stream.collect(Collectors.toList());

        assertEquals(8L, test_list.size());//(8 not 7 because we only dedup at the DB)

        // Group by bucket only (job slot deliberately null) before storing.
        final Map<Tuple2<String, String>, List<AnalyticTriggerStateBean>> grouped_triggers = test_list.stream()
                .collect(Collectors.groupingBy(t -> Tuples._2T(t.bucket_name(), null)));

        AnalyticTriggerCrudUtils.storeOrUpdateTriggerStage(bucket, _test_crud, grouped_triggers).join();

        // 7 after the store because the DB layer dedups one of the 8 beans.
        assertEquals(7L, _test_crud.countObjects().join().intValue());
    }

    // Check the triggers:
    { // Phase 2: only the external (resource) triggers are due for checking.
        final Map<Tuple2<String, String>, List<AnalyticTriggerStateBean>> res = AnalyticTriggerCrudUtils
                .getTriggersToCheck(_test_crud).join();

        assertEquals("Just one bucket", 1, res.keySet().size());

        final List<AnalyticTriggerStateBean> triggers = res.values().stream().findFirst().get();
        assertEquals("One trigger for each resource", 3, triggers.size());

        assertTrue("External triggers",
                triggers.stream().allMatch(trigger -> null != trigger.input_resource_combined()));

        // Save the triggers

        //DEBUG
        //this.printTriggerDatabase();

        // Push next-check 2s into the future so they are no longer "due".
        AnalyticTriggerCrudUtils.updateTriggerStatuses(_test_crud, triggers.stream(),
                Date.from(Instant.now().plusSeconds(2)), Optional.empty()).join();

        //DEBUG
        //this.printTriggerDatabase();
    }

    // Try again
    { // Phase 3: nothing is due any more after the status update above.
        final Map<Tuple2<String, String>, List<AnalyticTriggerStateBean>> res = AnalyticTriggerCrudUtils
                .getTriggersToCheck(_test_crud).join();

        assertEquals("None this time", 0, res.keySet().size());
    }

    // Activate the internal jobs and the external triggers, and set the times back
    // (this time will get the job deps but not the triggers)

    { // Phase 4: activation makes the internal job-dependency triggers visible.
        // activates external with bucket_active
        AnalyticTriggerCrudUtils
                .updateTriggersWithBucketOrJobActivation(_test_crud, bucket, Optional.empty(), Optional.empty())
                .join();

        // activate internal with bucket and job active
        AnalyticTriggerCrudUtils.updateTriggersWithBucketOrJobActivation(_test_crud, bucket,
                Optional.of(bucket.analytic_thread().jobs()), Optional.empty()).join();

        //(just update the next trigger time - set every record's next_check into the past)
        _test_crud
                .updateObjectsBySpec(CrudUtils.allOf(AnalyticTriggerStateBean.class), Optional.empty(),
                        CrudUtils.update(AnalyticTriggerStateBean.class).set(
                                AnalyticTriggerStateBean::next_check, Date.from(Instant.now().minusSeconds(2))))
                .join();

        final Map<Tuple2<String, String>, List<AnalyticTriggerStateBean>> res = AnalyticTriggerCrudUtils
                .getTriggersToCheck(_test_crud).join();

        final List<AnalyticTriggerStateBean> triggers = res.values().stream().findFirst().get();
        assertEquals("One trigger for each job dep", 4, triggers.size());

        assertFalse("Should be external triggers",
                triggers.stream().allMatch(trigger -> null == trigger.job_name()));

        // Defer these again so the next phase sees an empty result.
        AnalyticTriggerCrudUtils.updateTriggerStatuses(_test_crud, triggers.stream(),
                Date.from(Instant.now().plusSeconds(2)), Optional.empty()).join();
    }

    // Try again
    { // Phase 5: again nothing due after the deferral.
        final Map<Tuple2<String, String>, List<AnalyticTriggerStateBean>> res = AnalyticTriggerCrudUtils
                .getTriggersToCheck(_test_crud).join();

        assertEquals("None this time", 0, res.keySet().size());
    }

    // Activate the jobs "properly"

    { // Phase 6: a real active-job record yields a job trigger plus a bucket trigger.
        AnalyticTriggerCrudUtils.createActiveBucketOrJobRecord(_test_crud, bucket,
                Optional.of(bucket.analytic_thread().jobs().stream().findFirst().get()), Optional.empty())
                .join();

        //DEBUG
        //this.printTriggerDatabase();

        final Map<Tuple2<String, String>, List<AnalyticTriggerStateBean>> res2 = AnalyticTriggerCrudUtils
                .getTriggersToCheck(_test_crud).join();

        assertEquals("Got the one active bucket", 1, res2.keySet().size());

        final List<AnalyticTriggerStateBean> triggers = res2.values().stream().findFirst().get();
        assertEquals("One trigger for the one active job + 1 for the bucket", 2, triggers.size());

    }
}

From source file:com.vmware.photon.controller.api.client.resource.VmApiTest.java

@Test
public void testAttachDiskToVm() throws IOException {
    // Canned task the mocked HTTP layer will hand back.
    Task expected = new Task();
    expected.setId("12345");
    expected.setState("QUEUED");
    expected.setQueuedTime(Date.from(Instant.now()));

    // Serialize it so the mock can return it as the response body.
    String body = new ObjectMapper().writeValueAsString(expected);
    setupMocks(body, HttpStatus.SC_CREATED);

    // Attach a disk synchronously and verify the task round-trips intact.
    VmApi vmApi = new VmApi(restClient);
    Task actual = vmApi.attachDisk("foo", new VmDiskOperation());
    assertEquals(actual, expected);
}

From source file:com.vmware.photon.controller.api.client.resource.VmRestApiTest.java

@Test
public void testAttachDiskToVm() throws IOException {
    // Canned task the mocked HTTP layer will hand back.
    Task expected = new Task();
    expected.setId("12345");
    expected.setState("QUEUED");
    expected.setQueuedTime(Date.from(Instant.now()));

    // Serialize it so the mock can return it as the response body.
    String body = new ObjectMapper().writeValueAsString(expected);
    setupMocks(body, HttpStatus.SC_CREATED);

    // Exercise the REST implementation through the VmApi interface.
    VmApi vmApi = new VmRestApi(restClient);
    Task actual = vmApi.attachDisk("foo", new VmDiskOperation());
    assertEquals(actual, expected);
}

From source file:com.vmware.photon.controller.api.client.resource.VmApiTest.java

@Test
public void testAttachDiskToVmAsync() throws IOException, InterruptedException {
    // Canned task the mocked transport will return.
    final Task expected = new Task();
    expected.setId("12345");
    expected.setState("QUEUED");
    expected.setQueuedTime(Date.from(Instant.now()));

    setupMocks(new ObjectMapper().writeValueAsString(expected), HttpStatus.SC_CREATED);

    VmApi vmApi = new VmApi(restClient);

    // Latch released by either callback path so the test can block until done.
    final CountDownLatch done = new CountDownLatch(1);
    vmApi.attachDiskAsync("foo", new VmDiskOperation(), new FutureCallback<Task>() {
        @Override
        public void onSuccess(@Nullable Task result) {
            assertEquals(result, expected);
            done.countDown();
        }

        @Override
        public void onFailure(Throwable t) {
            fail(t.toString());
            done.countDown();
        }
    });

    assertThat(done.await(COUNTDOWNLATCH_AWAIT_TIMEOUT, TimeUnit.SECONDS), is(true));
}

From source file:com.vmware.photon.controller.api.client.resource.VmRestApiTest.java

@Test
public void testAttachDiskToVmAsync() throws IOException, InterruptedException {
    // Canned task the mocked transport will return.
    final Task expected = new Task();
    expected.setId("12345");
    expected.setState("QUEUED");
    expected.setQueuedTime(Date.from(Instant.now()));

    setupMocks(new ObjectMapper().writeValueAsString(expected), HttpStatus.SC_CREATED);

    // Exercise the REST implementation through the VmApi interface.
    VmApi vmApi = new VmRestApi(restClient);

    // Latch released by either callback path so the test can block until done.
    final CountDownLatch done = new CountDownLatch(1);
    vmApi.attachDiskAsync("foo", new VmDiskOperation(), new FutureCallback<Task>() {
        @Override
        public void onSuccess(@Nullable Task result) {
            assertEquals(result, expected);
            done.countDown();
        }

        @Override
        public void onFailure(Throwable t) {
            fail(t.toString());
            done.countDown();
        }
    });

    assertThat(done.await(COUNTDOWNLATCH_AWAIT_TIMEOUT, TimeUnit.SECONDS), is(true));
}

From source file:ru.anr.base.BaseParent.java

/**
 * We have to use old Date object, because Hibernate/JPA does not support
 * Java 8 dates (see https://java.net/jira/browse/JPA_SPEC-63 or
 * https://hibernate.atlassian.net/browse/HHH-8844).
 * /*from w  w  w.  j a  va 2  s .co  m*/
 * @param dateTime
 *            Date time in Java 8 format
 * @return Old Date object
 */
public static Date date(ZonedDateTime dateTime) {

    return Date.from(dateTime.toInstant());
}

From source file:bamboo.trove.rule.RuleChangeUpdateManager.java

@VisibleForTesting
public SolrQuery convertRuleToSearch(CdxRule rule, String notLastIndexed) {
    // Translate each non-blank URL pattern into a search clause;
    // with no usable patterns, fall back to matching every document.
    List<String> clauses = new ArrayList<>();
    for (String pattern : rule.getUrlPatterns()) {
        if (!pattern.trim().isEmpty()) {
            clauses.add(urlSearch(pattern));
        }
    }
    if (clauses.isEmpty()) {
        clauses.add("*:*");
    }
    SolrQuery query = createQuery("(" + StringUtils.join(clauses, ") OR (") + ")");

    // Skip documents already touched during this indexing run.
    query.addFilterQuery(notLastIndexed);

    // Embargo filter: only documents dated on/after TODAY minus the embargo period.
    if (rule.getPeriod() != null && !rule.getPeriod().isZero()) {
        ZonedDateTime now = ZonedDateTime.ofInstant(CdxRestrictionService.TODAY.toInstant(), TZ);
        Date embargoCutoff = Date.from(now.minus(rule.getPeriod()).toInstant());
        query.addFilterQuery(SolrEnum.DATE + ":[" + format.format(embargoCutoff) + " TO *]");
    }

    // Capture-date window filter, when the rule specifies one.
    if (rule.getCaptured() != null && rule.getCaptured().hasData()) {
        query.addFilterQuery(SolrEnum.DATE + ":[" + format.format(rule.getCaptured().start) + " TO "
                + format.format(rule.getCaptured().end) + "]");
    }

    // Access date is deliberately not filtered here: it is one of the deciding
    // data points in whether or not to run this query at all.
    return query;
}

From source file:org.silverpeas.components.gallery.notification.user.AlbumMediaNotificationManagerTest.java

private void assertJobScheduled(final JobTrigger jobTrigger, final OffsetDateTime nowReference,
        final long delayOffset) {
    // Measured delay = scheduled start minus the reference "now", with the
    // known offset removed; it must land inside the expected 10–11s window.
    final long measuredDelayMs = jobTrigger.getStartDate().getTime()
            - Date.from(nowReference.toInstant()).getTime() - delayOffset;
    assertThat(measuredDelayMs, greaterThanOrEqualTo(10000L));
    assertThat(measuredDelayMs, lessThanOrEqualTo(11000L));
    // A merely scheduled job must not have sent any subscriber notification.
    verify(userNotificationManager, times(0))
            .buildAndSend(any(GalleryAlbumMediaSubscriptionNotificationBuilder.class));
}

From source file:com.vmware.photon.controller.api.client.resource.VmApiTest.java

@Test
public void testDetachDiskFromVm() throws IOException {
    // Canned task the mocked HTTP layer will hand back.
    Task expected = new Task();
    expected.setId("12345");
    expected.setState("QUEUED");
    expected.setQueuedTime(Date.from(Instant.now()));

    // Serialize it so the mock can return it as the response body.
    String body = new ObjectMapper().writeValueAsString(expected);
    setupMocks(body, HttpStatus.SC_CREATED);

    // Detach a disk synchronously and verify the task round-trips intact.
    VmApi vmApi = new VmApi(restClient);
    Task actual = vmApi.detachDisk("foo", new VmDiskOperation());
    assertEquals(actual, expected);
}

From source file:com.vmware.photon.controller.api.client.resource.VmRestApiTest.java

@Test
public void testDetachDiskFromVm() throws IOException {
    // Canned task the mocked HTTP layer will hand back.
    Task expected = new Task();
    expected.setId("12345");
    expected.setState("QUEUED");
    expected.setQueuedTime(Date.from(Instant.now()));

    // Serialize it so the mock can return it as the response body.
    String body = new ObjectMapper().writeValueAsString(expected);
    setupMocks(body, HttpStatus.SC_CREATED);

    // Exercise the REST implementation through the VmApi interface.
    VmApi vmApi = new VmRestApi(restClient);
    Task actual = vmApi.detachDisk("foo", new VmDiskOperation());
    assertEquals(actual, expected);
}