List of usage examples for java.time.ZonedDateTime.of
public static ZonedDateTime of(int year, int month, int dayOfMonth, int hour, int minute, int second, int nanoOfSecond, ZoneId zone)
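A minimal standalone sketch of this overload before the real-world examples below (values and class name are illustrative only):

import java.time.ZoneId;
import java.time.ZonedDateTime;

public class ZonedDateTimeOfExample {
    public static void main(String[] args) {
        // year, month, dayOfMonth, hour, minute, second, nanoOfSecond, zone
        ZonedDateTime zdt = ZonedDateTime.of(2024, 3, 15, 13, 45, 30, 500, ZoneId.of("Europe/Paris"));
        System.out.println(zdt); // 2024-03-15T13:45:30.000000500+01:00[Europe/Paris]
    }
}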
From source file:org.primeframework.mvc.parameter.convert.converters.ZonedDateTimeConverterTest.java
@Test
public void toStrings() {
    GlobalConverter converter = new ZonedDateTimeConverter(new MockConfiguration());
    String str = converter.convertToString(ZonedDateTime.class, null, "testExpr", null);
    assertNull(str);
    str = converter.convertToString(ZonedDateTime.class, MapBuilder.asMap("dateTimeFormat", "MM-dd-yyyy"),
            "testExpr", ZonedDateTime.of(2008, 7, 8, 1, 1, 1, 0, ZoneId.systemDefault()));
    assertEquals(str, "07-08-2008");
}
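The assertion relies on the converter applying the "dateTimeFormat" attribute as a date pattern. A hedged, standalone sketch (not the converter's actual implementation) of the plain java.time call that produces the asserted string:

import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

public class ConverterFormatSketch {
    public static void main(String[] args) {
        ZonedDateTime value = ZonedDateTime.of(2008, 7, 8, 1, 1, 1, 0, ZoneId.systemDefault());
        // "MM-dd-yyyy" is the same pattern passed via the "dateTimeFormat" attribute above
        String formatted = DateTimeFormatter.ofPattern("MM-dd-yyyy").format(value);
        System.out.println(formatted); // 07-08-2008
    }
}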
From source file:com.example.geomesa.kafka.KafkaQuickStart.java
public static void addSimpleFeatures(SimpleFeatureType sft, FeatureStore producerFS, String visibility)
        throws InterruptedException, IOException {
    final int MIN_X = -180;
    final int MAX_X = 180;
    final int MIN_Y = -90;
    final int MAX_Y = 90;
    final int DX = 2;
    final int DY = 1;
    final String[] PEOPLE_NAMES = { "James", "John", "Peter", "Hannah", "Claire", "Gabriel" };
    final long SECONDS_PER_YEAR = 365L * 24L * 60L * 60L;
    final Random random = new Random();
    final ZonedDateTime MIN_DATE = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);

    SimpleFeatureBuilder builder = new SimpleFeatureBuilder(sft);
    DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();

    // creates and updates two SimpleFeatures.
    // the first time this for loop runs the two SimpleFeatures are created.
    // in the subsequent iterations of the for loop, the two SimpleFeatures are updated.
    int numFeatures = (MAX_X - MIN_X) / DX;
    for (int i = 1; i <= numFeatures; i++) {
        builder.add(PEOPLE_NAMES[i % PEOPLE_NAMES.length]); // name
        builder.add((int) Math.round(random.nextDouble() * 110)); // age
        builder.add(Date.from(
                MIN_DATE.plusSeconds((int) Math.round(random.nextDouble() * SECONDS_PER_YEAR)).toInstant())); // dtg
        builder.add(WKTUtils$.MODULE$.read("POINT(" + (MIN_X + DX * i) + " " + (MIN_Y + DY * i) + ")")); // geom
        SimpleFeature feature1 = builder.buildFeature("1");
        feature1.getUserData().put(Hints.USE_PROVIDED_FID, Boolean.TRUE);

        builder.add(PEOPLE_NAMES[(i + 1) % PEOPLE_NAMES.length]); // name
        builder.add((int) Math.round(random.nextDouble() * 110)); // age
        builder.add(Date.from(
                MIN_DATE.plusSeconds((int) Math.round(random.nextDouble() * SECONDS_PER_YEAR)).toInstant())); // dtg
        builder.add(WKTUtils$.MODULE$.read("POINT(" + (MIN_X + DX * i) + " " + (MAX_Y - DY * i) + ")")); // geom
        SimpleFeature feature2 = builder.buildFeature("2");
        feature2.getUserData().put(Hints.USE_PROVIDED_FID, Boolean.TRUE);

        if (visibility != null) {
            feature1.getUserData().put("geomesa.feature.visibility", visibility);
            feature2.getUserData().put("geomesa.feature.visibility", visibility);
        }

        // write the SimpleFeatures to Kafka
        featureCollection.add(feature1);
        featureCollection.add(feature2);
        producerFS.addFeatures(featureCollection);
        featureCollection.clear();

        // wait 100 ms in between updating SimpleFeatures to simulate a stream of data
        Thread.sleep(100);
    }
}
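The dtg attribute above is built by adding a random number of seconds to MIN_DATE and bridging to the legacy java.util.Date API. A small sketch of just that pattern, isolated from the GeoMesa types (class name is illustrative):

import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.Random;

public class RandomDtgSketch {
    public static void main(String[] args) {
        final long SECONDS_PER_YEAR = 365L * 24L * 60L * 60L;
        final ZonedDateTime MIN_DATE = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
        Random random = new Random();
        // pick a random instant within the year following MIN_DATE, then convert for APIs that need Date
        ZonedDateTime dtg = MIN_DATE.plusSeconds((long) (random.nextDouble() * SECONDS_PER_YEAR));
        Date legacy = Date.from(dtg.toInstant());
        System.out.println(dtg + " -> " + legacy);
    }
}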
From source file:io.stallion.tests.integration.jobs.JobsTests.java
/**
 * Tests processing jobs with no threading involved for easy debugging.
 */
@Test
public void testJobProcessing() throws Exception {
    ExampleJobOne.RUN_COUNT = 0;
    ExampleJobTwo.RUN_COUNT = 0;
    ExampleJobThree.RUN_COUNT = 0;

    // Define and load job 1, to run at 30 minutes after the hour
    JobDefinition job1 = new JobDefinition() {
        {
            setJobClass(ExampleJobOne.class);
            setAlertThresholdMinutes(150);
            setSchedule(new Schedule() {
                {
                    minutes(30);
                    everyHour();
                    everyDay();
                    everyMonth();
                    verify();
                }
            });
        }
    };

    // Define and load job 2, to run at 12:30 every day
    JobDefinition job2 = new JobDefinition().setJobClass(ExampleJobTwo.class).setAlertThresholdMinutes(3000)
            .setSchedule(new Schedule().minutes(30).hours(12).everyDay().everyMonth().verify());

    // Define and load job 3, to run at 5PM on Tuesday
    JobDefinition job3 = new JobDefinition().setJobClass(ExampleJobThree.class).setAlertThresholdMinutes(3000)
            .setSchedule(
                    new Schedule().minutes(0).hours(17).daysOfWeek(DayOfWeek.TUESDAY).everyMonth().verify());

    ZonedDateTime now = ZonedDateTime.of(2015, 1, 18, 10, 40, 12, 0, ZoneId.of("UTC"));
    JobCoordinator.instance().registerJobForTest(job1, now);
    JobCoordinator.instance().registerJobForTest(job2, now);
    JobCoordinator.instance().registerJobForTest(job3, now);

    // Run for time at 11:30 - Job 1 should run
    //now = ZonedDateTime.of(2015, 1, 18, 11, 30, 7, 121, ZoneId.of("UTC"));
    //JobCoordinator.instance().resetForDateTime(now.minusMinutes(1)).executeJobsForCurrentTime(now);
    JobCoordinator.instance()
            .executeJobsForCurrentTime(ZonedDateTime.of(2015, 1, 18, 11, 30, 7, 121, ZoneId.of("UTC")));
    assertEquals(1, ExampleJobOne.RUN_COUNT);
    assertEquals(0, ExampleJobTwo.RUN_COUNT);
    assertEquals(0, ExampleJobThree.RUN_COUNT);

    // Run for time at 11:30 again - no additional runs should happen
    JobCoordinator.instance()
            .executeJobsForCurrentTime(ZonedDateTime.of(2015, 1, 18, 11, 30, 7, 121, ZoneId.of("UTC")));
    //JobCoordinator.instance().resetForDateTime(now.minusMinutes(1)).executeJobsForCurrentTime(now);
    assertEquals(1, ExampleJobOne.RUN_COUNT);
    assertEquals(0, ExampleJobTwo.RUN_COUNT);
    assertEquals(0, ExampleJobThree.RUN_COUNT);

    // Run for time 12:30 - Job 1 and Job 2 should run
    JobCoordinator.instance()
            .executeJobsForCurrentTime(ZonedDateTime.of(2015, 1, 18, 12, 30, 7, 121, ZoneId.of("UTC")));
    //JobCoordinator.instance().resetForDateTime(now.minusMinutes(1)).executeJobsForCurrentTime(now);
    assertEquals(1, ExampleJobTwo.RUN_COUNT);
    assertEquals(2, ExampleJobOne.RUN_COUNT);
    assertEquals(0, ExampleJobThree.RUN_COUNT);

    // Run for time 5PM monday - no jobs should run
    JobCoordinator.instance()
            .executeJobsForCurrentTime(ZonedDateTime.of(2015, 1, 19, 5, 0, 7, 121, ZoneId.of("UTC")));
    //JobCoordinator.instance().resetForDateTime(now.minusMinutes(1)).executeJobsForCurrentTime(now);
    assertEquals(2, ExampleJobOne.RUN_COUNT);
    assertEquals(1, ExampleJobTwo.RUN_COUNT);
    assertEquals(0, ExampleJobThree.RUN_COUNT);

    // Run for 12:30 Tuesday - Job 1 and Job 2 should run
    // (After first running minutes before to get the time reset)
    JobCoordinator.instance()
            .executeJobsForCurrentTime(ZonedDateTime.of(2015, 1, 20, 12, 25, 7, 121, ZoneId.of("UTC")));
    JobCoordinator.instance()
            .executeJobsForCurrentTime(ZonedDateTime.of(2015, 1, 20, 12, 30, 7, 121, ZoneId.of("UTC")));
    //JobCoordinator.instance().resetForDateTime(now.minusMinutes(1)).executeJobsForCurrentTime(now);
    assertEquals(3, ExampleJobOne.RUN_COUNT);
    assertEquals(2, ExampleJobTwo.RUN_COUNT);
    assertEquals(0, ExampleJobThree.RUN_COUNT);

    // Run for time 5PM Tuesday - job 3 should run
    JobCoordinator.instance()
            .executeJobsForCurrentTime(ZonedDateTime.of(2015, 1, 20, 17, 0, 7, 121, ZoneId.of("UTC")));
    JobCoordinator.instance()
            .executeJobsForCurrentTime(ZonedDateTime.of(2015, 1, 20, 17, 0, 7, 121, ZoneId.of("UTC")));
    // JobCoordinator.instance().executeJobsForCurrentTime(
    //         ZonedDateTime.of(2015, 1, 20, 17, 0, 7, 121, ZoneId.of("UTC")));
    assertEquals(3, ExampleJobOne.RUN_COUNT);
    assertEquals(2, ExampleJobTwo.RUN_COUNT);
    assertEquals(1, ExampleJobThree.RUN_COUNT);
}
From source file:alfio.repository.EventRepositoryTest.java
@Test
public void testSQLInsertedDatesRespectTheirTimeZone() throws Exception {
    //these are the values of what we have inserted in the SQL insert script
    TimeZone eventTimeZone = TimeZone.getTimeZone("America/New_York");
    ZoneId eventZoneId = eventTimeZone.toZoneId();
    ZonedDateTime beginEventDate = ZonedDateTime.of(2015, 10, 10, 0, 0, 0, 0, eventZoneId);
    ZonedDateTime endEventDate = ZonedDateTime.of(2015, 10, 10, 23, 59, 0, 0, eventZoneId);
    Event e = eventRepository.findById(0);
    assertNotNull("Event not found in DB", e);
    assertEquals("Begin date is not correct", e.getBegin(), beginEventDate);
    assertEquals("End date is not correct", e.getEnd(), endEventDate);
    //since the toString method is used when debugging, and it relies on the system TimeZone, we test it too
    System.out.println(e.getBegin().toString());
    System.out.println(e.getEnd().toString());
}
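The assertions above use ZonedDateTime.equals, which compares the local date-time, the offset and the zone, so they only pass when the repository rebuilds the dates in the event's own zone. A short sketch (separate from the test) of that distinction:

import java.time.ZoneId;
import java.time.ZonedDateTime;

public class ZoneEqualitySketch {
    public static void main(String[] args) {
        ZonedDateTime inNewYork = ZonedDateTime.of(2015, 10, 10, 0, 0, 0, 0, ZoneId.of("America/New_York"));
        ZonedDateTime inUtc = inNewYork.withZoneSameInstant(ZoneId.of("UTC"));
        System.out.println(inNewYork.equals(inUtc));  // false - equals() also compares the zone
        System.out.println(inNewYork.isEqual(inUtc)); // true  - same instant on the time-line
    }
}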
From source file:io.stallion.dataAccess.file.ListingEndpoints.java
@GET
@Path("/rss.xml")
@Produces("text/xml")
public String rss() throws Exception {
    Map<String, Object> context = makeContext();
    Pager pager = filterChain().sort("publishDate", "desc").pager(0, 20);
    context.put("postsPager", pager);
    context.put("blogUrl", Context.getSettings().getSiteUrl() + config.getFullPath());
    ZonedDateTime buildTime = ZonedDateTime.of(2015, 1, 1, 12, 0, 0, 0, GeneralUtils.UTC);
    if (pager.getItems().size() > 0) {
        TextItem item = (TextItem) pager.getItems().get(0);
        buildTime = item.getPublishDate().plusMinutes(1);
    }
    context.put("generator", Settings.instance().getMetaGenerator());
    context.put("lastBuildDate",
            DateUtils.formatLocalDateFromZonedDate(buildTime, "EEE, dd MMM yyyy HH:mm:ss Z"));
    return TemplateRenderer.instance().renderTemplate(getClass().getResource("/templates/rss.jinja").toString(),
            context);
}
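DateUtils.formatLocalDateFromZonedDate is Stallion's own helper; assuming GeneralUtils.UTC is simply the UTC zone, the equivalent formatting with plain java.time would look roughly like this:

import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Locale;

public class RssDateSketch {
    public static void main(String[] args) {
        // same fallback build time as above, formatted in the RFC 822 style RSS expects
        ZonedDateTime buildTime = ZonedDateTime.of(2015, 1, 1, 12, 0, 0, 0, ZoneOffset.UTC);
        DateTimeFormatter rfc822 = DateTimeFormatter.ofPattern("EEE, dd MMM yyyy HH:mm:ss Z", Locale.US);
        System.out.println(rfc822.format(buildTime)); // Thu, 01 Jan 2015 12:00:00 +0000
    }
}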
From source file:com.example.geomesa.cassandra.CassandraQuickStart.java
static FeatureCollection createNewFeatures(SimpleFeatureType simpleFeatureType, int numNewFeatures) {
    DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();

    String id;
    Object[] NO_VALUES = {};
    String[] PEOPLE_NAMES = { "Addams", "Bierce", "Clemens" };
    Long SECONDS_PER_YEAR = 365L * 24L * 60L * 60L;
    Random random = new Random(5771);
    ZonedDateTime MIN_DATE = ZonedDateTime.of(2014, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
    Double MIN_X = -78.0;
    Double MIN_Y = -39.0;
    Double DX = 2.0;
    Double DY = 2.0;

    for (int i = 0; i < numNewFeatures; i++) {
        // create the new (unique) identifier and empty feature shell
        id = "Observation." + Integer.toString(i);
        SimpleFeature simpleFeature = SimpleFeatureBuilder.build(simpleFeatureType, NO_VALUES, id);

        // be sure to tell GeoTools explicitly that you want to use the ID you provided
        simpleFeature.getUserData().put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE);

        // populate the new feature's attributes

        // string value
        simpleFeature.setAttribute("Who", PEOPLE_NAMES[i % PEOPLE_NAMES.length]);

        // long value
        simpleFeature.setAttribute("What", i);

        // location: construct a random point within a 2-degree-per-side square
        double x = MIN_X + random.nextDouble() * DX;
        double y = MIN_Y + random.nextDouble() * DY;
        Geometry geometry = WKTUtils.read("POINT(" + x + " " + y + ")");
        simpleFeature.setAttribute("Where", geometry);

        // date-time: construct a random instant within a year
        ZonedDateTime dateTime = MIN_DATE.plusSeconds((int) Math.round(random.nextDouble() * SECONDS_PER_YEAR));
        simpleFeature.setAttribute("When", Date.from(dateTime.toInstant()));

        // another string value
        // "Why"; left empty, showing that not all attributes need values

        // accumulate this new feature in the collection
        featureCollection.add(simpleFeature);
    }

    return featureCollection;
}
From source file:io.stallion.tests.integration.aMinimalSite.AMinimalSiteTest.java
@Test
public void testMarkdown() throws IOException {
    String page = IOUtils.toString(getClass().getResource("/text_files/busted-page.txt"), "UTF-8");
    TextFilePersister persister = new TextFilePersister();
    persister.setModelClass(TextItem.class);
    TextItem item = persister.fromString(page, Paths.get("/naming-things.txt"));
    assertEquals("/how-to-name-variables", item.getSlug());
    // publishDate:2013-12-12 11:30:00
    ZonedDateTime dt = ZonedDateTime.of(2013, 12, 12, 11, 30, 0, 0, ZoneId.of("America/New_York"));
    assertEquals(dt, item.getPublishDate());
}
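The test hard-codes the ZonedDateTime it expects the persister to produce from the publishDate:2013-12-12 11:30:00 header. A hedged sketch of that round trip (the persister's real parsing code may differ):

import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

public class PublishDateSketch {
    public static void main(String[] args) {
        // hypothetical parse of the header value, then attach the site's zone
        LocalDateTime local = LocalDateTime.parse("2013-12-12 11:30:00",
                DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
        ZonedDateTime parsed = local.atZone(ZoneId.of("America/New_York"));
        ZonedDateTime expected = ZonedDateTime.of(2013, 12, 12, 11, 30, 0, 0, ZoneId.of("America/New_York"));
        System.out.println(parsed.equals(expected)); // true
    }
}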
From source file:org.openlmis.fulfillment.web.OrderControllerTest.java
@Before
public void setUp() {
    when(dateHelper.getCurrentDateTimeWithSystemZone())
            .thenReturn(ZonedDateTime.of(2015, 5, 7, 10, 5, 20, 500, ZoneId.systemDefault()));

    OrderNumberConfiguration orderNumberConfiguration =
            new OrderNumberConfiguration("prefix", false, false, false);

    when(orderService.createOrder(orderDto, lastUpdaterId)).thenReturn(order);
    when(programReferenceDataService.findOne(any())).thenReturn(programDto);
    when(authentication.isClientOnly()).thenReturn(true);
    when(orderNumberConfigurationRepository.findAll()).thenReturn(Lists.newArrayList(orderNumberConfiguration));
    when(extensionManager.getExtension(any(), any())).thenReturn(orderNumberGenerator);
    when(orderNumberGenerator.generate(any())).thenReturn(ORDER_NUMBER);
    when(proofOfDeliveryRepository.save(any(ProofOfDelivery.class))).thenReturn(proofOfDelivery);
    when(orderDtoBuilder.build(order)).thenReturn(orderDto);
    when(shipmentService.save(any(Shipment.class)))
            .thenAnswer(invocation -> invocation.getArgumentAt(0, Shipment.class));

    orderDto.setUpdaterId(lastUpdaterId);

    ReflectionTestUtils.setField(exporterBuilder, "serviceUrl", SERVICE_URL);
    ReflectionTestUtils.setField(exporterBuilder, "facilities", facilities);
    ReflectionTestUtils.setField(exporterBuilder, "programs", programs);
    ReflectionTestUtils.setField(exporterBuilder, "periods", periods);
    ReflectionTestUtils.setField(exporterBuilder, "users", users);
}
From source file:com.tesobe.obp.transport.spi.MockResponder.java
/**
 * Return a total of seventeen transactions, posted one per day counting up
 * from January 1st, 1999.
 *
 * @param state local state, can store result set over page requests
 * @param p pager
 * @param accountId account
 * @param bankId bank
 *
 * @return 17 transactions in as many pages, as needed.
 */
protected List<Map<String, Object>> transactions(String state, Decoder.Pager p, String accountId,
        String bankId) {
    ZonedDateTime completed = ZonedDateTime.of(1999, 1, 31, 0, 0, 0, 0, UTC);
    final List<Map<String, Object>> data = new ArrayList<>(); // todo stream
    int numTransactions = 17;

    for (int i = 0; i < numTransactions; ++i) {
        int dayOfMonth = Math.min(i, 26) + 1; // safe month length
        ZonedDateTime posted = ZonedDateTime.of(1999, 1, dayOfMonth, 0, 0, 0, 0, UTC);
        HashMap<String, Object> t = new HashMap<>();

        t.put(Transaction.accountId, accountId);
        t.put(Transaction.bankId, bankId);
        t.put(Transaction.transactionId, format(Locale.US, "transactionId-%d", i));
        t.put(Transaction.completedDate, completed);
        t.put(Transaction.postedDate, posted);
        t.put(Transaction.counterPartyId, format(Locale.US, "counterPartyId-%d", i));

        data.add(t);
    }

    List<Map<String, Object>> processed = data.stream().filter(filter(p)).sorted(sorter(p)).collect(toList());

    cache.put(state, processed);

    return processed;
}
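The dayOfMonth clamp above matters because ZonedDateTime.of validates every field and throws for impossible dates. A tiny standalone sketch of that behavior (the exact exception message may vary by JDK):

import java.time.DateTimeException;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

public class ValidationSketch {
    public static void main(String[] args) {
        try {
            // February never has 30 days, so construction fails before any zone handling
            ZonedDateTime.of(1999, 2, 30, 0, 0, 0, 0, ZoneOffset.UTC);
        } catch (DateTimeException e) {
            System.out.println(e.getMessage()); // e.g. "Invalid date 'FEBRUARY 30'"
        }
    }
}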
From source file:com.example.geomesa.lambda.LambdaQuickStart.java
@Override
public void run() {
    try {
        // create the schema
        final String sftName = "lambda-quick-start";
        final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
        SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
        if (ds.getSchema(sftName) != null) {
            out.println("'" + sftName + "' feature type already exists - quick start will not work correctly");
            out.println("Please delete it and re-run");
            return;
        }
        out.println("Creating feature type '" + sftName + "'");
        ds.createSchema(sft);

        out.println("Feature type created - register the layer '" + sftName
                + "' in geoserver then hit <enter> to continue");
        in.read();

        SimpleFeatureWriter writer = ds.getFeatureWriterAppend(sftName, Transaction.AUTO_COMMIT);

        out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");

        // creates and adds SimpleFeatures to the producer every 1/5th of a second
        final int COUNT = 1000;
        final int MIN_X = -180;
        final int MAX_X = 180;
        final int MIN_Y = -90;
        final int MAX_Y = 90;
        final int DX = 2;
        final String[] PEOPLE_NAMES = { "James", "John", "Peter", "Hannah", "Claire", "Gabriel" };
        final long SECONDS_PER_YEAR = 365L * 24L * 60L * 60L;
        ZonedDateTime MIN_DATE = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
        final Random random = new Random();

        int numUpdates = (MAX_X - MIN_X) / DX;
        for (int j = 0; j < numUpdates; j++) {
            for (int i = 0; i < COUNT; i++) {
                SimpleFeature feature = writer.next();
                feature.setAttribute(0, PEOPLE_NAMES[i % PEOPLE_NAMES.length]); // name
                feature.setAttribute(1, (int) Math.round(random.nextDouble() * 110)); // age
                feature.setAttribute(2, Date.from(MIN_DATE
                        .plusSeconds((int) Math.round(random.nextDouble() * SECONDS_PER_YEAR)).toInstant())); // dtg
                feature.setAttribute(3, "POINT(" + (MIN_X + (DX * j)) + " "
                        + (MIN_Y + ((MAX_Y - MIN_Y) / ((double) COUNT)) * i) + ")"); // geom
                feature.getUserData().put(Hints.PROVIDED_FID, String.format("%04d", i));
                writer.write();
            }
            Thread.sleep(200);
        }

        writer.close();

        out.println("Waiting for expiry and persistence...");

        long total = 0, persisted = 0;
        do {
            long newTotal = (long) ds.stats().getCount(sft, Filter.INCLUDE, true).get();
            long newPersisted = (long) ((AccumuloDataStore) ds.persistence()).stats()
                    .getCount(sft, Filter.INCLUDE, true).get();
            if (newTotal != total || newPersisted != persisted) {
                total = newTotal;
                persisted = newPersisted;
                out.println("Total features: " + total + ", features persisted to Accumulo: " + persisted);
            }
            Thread.sleep(100);
        } while (persisted < COUNT || total > COUNT);
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        ds.dispose();
    }
}