List of usage examples for java.time OffsetDateTime ofInstant
public static OffsetDateTime ofInstant(Instant instant, ZoneId zone)
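Before the project examples, a minimal standalone sketch (not taken from any of the projects below) showing the two-argument call with an Instant and a fixed ZoneOffset; the epoch value is arbitrary:

import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;

public class OfInstantExample {

    public static void main(String[] args) {
        Instant instant = Instant.ofEpochSecond(1_000_000_000L); // 2001-09-09T01:46:40Z

        OffsetDateTime utc = OffsetDateTime.ofInstant(instant, ZoneOffset.UTC);
        OffsetDateTime plusTwo = OffsetDateTime.ofInstant(instant, ZoneOffset.ofHours(2));

        System.out.println(utc);     // 2001-09-09T01:46:40Z
        System.out.println(plusTwo); // 2001-09-09T03:46:40+02:00
    }
}

The same instant is rendered with two different offsets; only the printed local date/time changes, not the point on the timeline.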
From source file:org.openmhealth.shim.jawbone.mapper.JawboneDataPointMapper.java
/**
 * @param listEntryNode an individual entry node from the "items" array of a Jawbone endpoint response
 * @return a {@link DataPointHeader} containing the appropriate information based on the input parameters
 */
protected DataPointHeader getHeader(JsonNode listEntryNode, T measure) {

    DataPointAcquisitionProvenance.Builder provenanceBuilder =
            new DataPointAcquisitionProvenance.Builder(RESOURCE_API_SOURCE_NAME);

    if (isSensed(listEntryNode)) {
        provenanceBuilder.setModality(SENSED);
    }

    DataPointAcquisitionProvenance acquisitionProvenance = provenanceBuilder.build();

    asOptionalString(listEntryNode, "xid")
            .ifPresent(externalId -> acquisitionProvenance.setAdditionalProperty("external_id", externalId));

    // TODO discuss the name of the external identifier, to make it clear it's the ID used by the source

    asOptionalLong(listEntryNode, "time_updated").ifPresent(
            sourceUpdatedDateTime -> acquisitionProvenance.setAdditionalProperty("source_updated_date_time",
                    OffsetDateTime.ofInstant(Instant.ofEpochSecond(sourceUpdatedDateTime), ZoneId.of("Z"))));

    DataPointHeader header = new DataPointHeader.Builder(UUID.randomUUID().toString(), measure.getSchemaId())
            .setAcquisitionProvenance(acquisitionProvenance)
            .build();

    // FIXME "shared" is never documented
    asOptionalBoolean(listEntryNode, "shared")
            .ifPresent(isShared -> header.setAdditionalProperty("shared", isShared));

    return header;
}
From source file:com.esri.geoportal.harvester.api.base.DataReferenceSerializer.java
private String formatIsoDate(Date date) {
    Instant instant = date.toInstant();
    Calendar cal = Calendar.getInstance();
    cal.setTime(date);
    ZoneOffset zoneOffset = ZoneOffset.ofHours(cal.getTimeZone().getRawOffset() / (1000 * 60 * 60));
    OffsetDateTime ofInstant = OffsetDateTime.ofInstant(instant, zoneOffset);
    return FORMATTER.format(ofInstant);
}
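The snippet above builds the offset from the Calendar's raw offset, which does not account for daylight saving time. If the goal is simply "the system default offset at that instant", a hypothetical alternative sketch (IsoDateSketch and its formatter choice are illustrative, not part of the Esri project) lets ofInstant resolve the offset from a ZoneId:

import java.time.OffsetDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;

class IsoDateSketch {

    // java.time resolves the offset of the system default zone at the given instant, DST included
    static String formatIsoDate(Date date) {
        OffsetDateTime odt = OffsetDateTime.ofInstant(date.toInstant(), ZoneId.systemDefault());
        return DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(odt);
    }

    public static void main(String[] args) {
        System.out.println(formatIsoDate(new Date()));
    }
}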
From source file:org.openmhealth.shim.googlefit.mapper.GoogleFitDataPointMapper.java
/** * Converts a nanosecond timestamp from the Google Fit API into an offset datetime value. * * @param unixEpochNanosString the timestamp directly from the Google JSON document * @return an offset datetime object representing the input timestamp *///from w ww . j a va 2 s. co m public OffsetDateTime convertGoogleNanosToOffsetDateTime(String unixEpochNanosString) { return OffsetDateTime.ofInstant(Instant.ofEpochSecond(0, Long.parseLong(unixEpochNanosString)), ZoneId.of("Z")); }
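A standalone sketch of the same conversion with a made-up value: Instant.ofEpochSecond(0, nanos) treats the whole argument as nanoseconds since the epoch, so the string never has to be split into seconds and a remainder.

import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneId;

class NanosSketch {

    public static void main(String[] args) {
        // 1,000,000,000,000,000,000 ns == 1,000,000,000 s == 2001-09-09T01:46:40Z
        String unixEpochNanosString = "1000000000000000000";

        OffsetDateTime result = OffsetDateTime.ofInstant(
                Instant.ofEpochSecond(0, Long.parseLong(unixEpochNanosString)), ZoneId.of("Z"));

        System.out.println(result); // 2001-09-09T01:46:40Z
    }
}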
From source file:com.amazonaws.services.kinesis.io.StringDataExtractor.java
/**
 * {@inheritDoc}
 */
@Override
public List<AggregateData> getData(InputEvent event) throws SerializationException {
    try {
        int summaryIndex = -1;
        String dateString;
        OffsetDateTime dateValue;
        List<AggregateData> data = new ArrayList<>();
        List<List<String>> content = serialiser.toClass(event);

        for (List<String> line : content) {
            if (line != null) {
                LabelSet labels = new LabelSet();
                labels.withAlias(this.labelAttributeAlias);

                for (Integer key : this.labelIndicies) {
                    labels.put("" + key, line.get(key));
                }

                // get the unique index
                String uniqueId = null;
                if (this.usePartitionKeyForUnique) {
                    uniqueId = event.getPartitionKey();
                } else if (this.useSequenceForUnique) {
                    uniqueId = event.getSequenceNumber();
                } else {
                    if (this.uniqueIdIndex != -1) {
                        uniqueId = line.get(this.uniqueIdIndex);
                    }
                }

                // get the date value from the line
                if (this.dateValueIndex != -1) {
                    dateString = line.get(dateValueIndex);

                    if (this.dateFormat != null) {
                        dateValue = OffsetDateTime.parse(dateString, dateFormatter);
                    } else {
                        // no formatter, so treat as epoch seconds
                        try {
                            dateValue = OffsetDateTime.ofInstant(
                                    Instant.ofEpochMilli(Long.parseLong(dateString)), ZoneId.of("UTC"));
                        } catch (Exception e) {
                            LOG.error(String.format(
                                    "Unable to create Date Value element from item '%s' due to invalid format as Epoch Seconds",
                                    dateValueIndex));
                            throw new SerializationException(e);
                        }
                    }
                } else {
                    dateValue = OffsetDateTime.now(ZoneId.of("UTC"));
                }

                // get the summed values
                if (this.aggregatorType.equals(AggregatorType.SUM)) {
                    sumUpdates = new HashMap<>();

                    // get the positional sum items
                    for (int i = 0; i < summaryIndicies.size(); i++) {
                        summaryIndex = summaryIndicies.get(i);
                        try {
                            sumUpdates.put("" + summaryIndex, Double.parseDouble(line.get(summaryIndex)));
                        } catch (NumberFormatException nfe) {
                            LOG.error(String.format(
                                    "Unable to deserialise Summary '%s' due to NumberFormatException", i));
                            throw new SerializationException(nfe);
                        }
                    }
                }

                data.add(new AggregateData(uniqueId, labels, dateValue, sumUpdates));
            }
        }

        return data;
    } catch (Exception e) {
        throw new SerializationException(e);
    }
}
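The date-handling branch in isolation, as a minimal sketch with a hypothetical value. Note that although the comment and log message in the snippet refer to epoch seconds, the call actually parses the string as epoch milliseconds via Instant.ofEpochMilli:

import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneId;

class EpochMillisSketch {

    public static void main(String[] args) {
        String dateString = "1000000000000"; // epoch milliseconds, i.e. 2001-09-09T01:46:40Z

        OffsetDateTime dateValue = OffsetDateTime.ofInstant(
                Instant.ofEpochMilli(Long.parseLong(dateString)), ZoneId.of("UTC"));

        System.out.println(dateValue); // 2001-09-09T01:46:40Z
    }
}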
From source file:org.openmhealth.shim.ihealth.mapper.IHealthDataPointMapper.java
/**
 * This method transforms a timestamp from an iHealth response (which is in the form of local time as epoch
 * seconds) into an {@link OffsetDateTime} with the correct date/time and offset. The timestamps provided in
 * iHealth responses are not unix epoch seconds in UTC but instead a unix epoch seconds value that is offset by
 * the time zone of the data point.
 */
protected static OffsetDateTime getDateTimeWithCorrectOffset(Long localTimeAsEpochSeconds,
        ZoneOffset zoneOffset) {

    /* iHealth provides the local time of a measurement as if it had occurred in UTC, along with the time zone
       offset where the measurement occurred. To retrieve the correct OffsetDateTime, we must retain the local
       date/time value, but replace the time zone offset. */
    return OffsetDateTime.ofInstant(Instant.ofEpochSecond(localTimeAsEpochSeconds), ZoneOffset.UTC)
            .withOffsetSameLocal(zoneOffset);
}
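A standalone sketch of the same trick with made-up numbers: the epoch value 1422532800 reads as 2015-01-29T12:00Z, and withOffsetSameLocal keeps that wall-clock time while swapping in the real offset.

import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;

class LocalEpochSketch {

    public static void main(String[] args) {
        // Hypothetical iHealth-style value: 12:00 local time encoded as if it were UTC
        long localTimeAsEpochSeconds = 1422532800L; // decodes to 2015-01-29T12:00Z
        ZoneOffset zoneOffset = ZoneOffset.of("-08:00");

        OffsetDateTime corrected = OffsetDateTime
                .ofInstant(Instant.ofEpochSecond(localTimeAsEpochSeconds), ZoneOffset.UTC)
                .withOffsetSameLocal(zoneOffset);

        System.out.println(corrected); // 2015-01-29T12:00-08:00
    }
}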
From source file:org.openmhealth.shim.jawbone.mapper.JawboneDataPointMapper.java
/**
 * @param builder a {@link Measure} builder
 * @param listEntryNode an individual entry node from the "items" array of a Jawbone endpoint response
 */
protected void setEffectiveTimeFrame(T.Builder builder, JsonNode listEntryNode) {

    Optional<Long> optionalStartTime = asOptionalLong(listEntryNode, "time_created");
    Optional<Long> optionalEndTime = asOptionalLong(listEntryNode, "time_completed");

    if (optionalStartTime.isPresent() && optionalStartTime.get() != null && optionalEndTime.isPresent()
            && optionalEndTime.get() != null) {

        ZoneId timeZoneForStartTime = getTimeZoneForTimestamp(listEntryNode, optionalStartTime.get());
        ZoneId timeZoneForEndTime = getTimeZoneForTimestamp(listEntryNode, optionalEndTime.get());

        OffsetDateTime startTime = OffsetDateTime.ofInstant(Instant.ofEpochSecond(optionalStartTime.get()),
                timeZoneForStartTime);
        OffsetDateTime endTime = OffsetDateTime.ofInstant(Instant.ofEpochSecond(optionalEndTime.get()),
                timeZoneForEndTime);

        builder.setEffectiveTimeFrame(TimeInterval.ofStartDateTimeAndEndDateTime(startTime, endTime));
    }
    else if (optionalStartTime.isPresent() && optionalStartTime.get() != null) {

        ZoneId timeZoneForStartTime = getTimeZoneForTimestamp(listEntryNode, optionalStartTime.get());

        builder.setEffectiveTimeFrame(
                OffsetDateTime.ofInstant(Instant.ofEpochSecond(optionalStartTime.get()), timeZoneForStartTime));
    }
}
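Unlike the fixed "Z" used in the other examples on this page, the Jawbone mapper passes a region-based ZoneId per timestamp, so ofInstant resolves whatever offset that zone had at that instant. A minimal sketch with hypothetical instants and zone:

import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneId;

class ZoneIdOffsetSketch {

    public static void main(String[] args) {
        Instant winter = Instant.parse("2015-01-15T12:00:00Z");
        Instant summer = Instant.parse("2015-07-15T12:00:00Z");
        ZoneId newYork = ZoneId.of("America/New_York");

        // The same zone yields different offsets depending on the instant (EST vs EDT)
        System.out.println(OffsetDateTime.ofInstant(winter, newYork)); // 2015-01-15T07:00-05:00
        System.out.println(OffsetDateTime.ofInstant(summer, newYork)); // 2015-07-15T08:00-04:00
    }
}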
From source file:com.amazonaws.services.kinesis.io.JsonDataExtractor.java
/**
 * {@inheritDoc}
 */
@Override
public List<AggregateData> getData(InputEvent event) throws SerializationException {
    try {
        List<AggregateData> aggregateData = new ArrayList<>();
        OffsetDateTime dateValue = null;
        JsonNode jsonContent = null;
        String dateString, summary = null;
        long localOffset = 0;

        List<String> items = (List<String>) serialiser.toClass(event);

        // log a warning if we didn't get anything back from the serialiser
        // - this could be OK, but probably isn't
        // it would be OK, for example, if you have filterRegex
        // if (items == null || items.size() == 0)
        //     LOG.warn(String.format(
        //             "Failed to deserialise any content for Record (Partition Key %s, Sequence %s",
        //             event.getPartitionKey(), event.getSequenceNumber()));

        // process all the items returned by the serialiser
        for (String item : items) {
            // Convert the string to a Jackson JsonNode for navigation
            jsonContent = StreamAggregatorUtils.asJsonNode(item);
            sumUpdates = new HashMap<>();

            LabelSet labels = new LabelSet();
            for (String key : this.labelAttributes) {
                labels.put(key, StreamAggregatorUtils.readValueAsString(jsonContent, key));
            }

            // get the unique ID for the event
            String uniqueId = null;
            if (this.uniqueIdAttribute != null) {
                switch (this.uniqueIdAttribute) {
                case StreamAggregator.REF_PARTITION_KEY:
                    uniqueId = event.getPartitionKey();
                    break;
                case StreamAggregator.REF_SEQUENCE:
                    uniqueId = event.getSequenceNumber();
                    break;
                default:
                    uniqueId = StreamAggregatorUtils.readValueAsString(jsonContent, uniqueIdAttribute);
                    break;
                }
            }

            // get the date value from the line
            if (dateValueAttribute != null) {
                dateString = StreamAggregatorUtils.readValueAsString(jsonContent, dateValueAttribute);

                // bail on no date returned
                if (dateString == null || dateString.equals(""))
                    throw new SerializationException(
                            String.format("Unable to read date value attribute %s from JSON Content %s",
                                    dateValueAttribute, item));

                // turn date as long or string into Date
                if (this.dateFormat != null) {
                    dateValue = OffsetDateTime.parse(dateString, dateFormatter);
                } else {
                    // no formatter, so treat as epoch seconds
                    try {
                        dateValue = OffsetDateTime.ofInstant(Instant.ofEpochMilli(Long.parseLong(dateString)),
                                ZoneId.of("UTC"));
                    } catch (Exception e) {
                        LOG.error(String.format(
                                "Unable to create Date Value element from item '%s' due to invalid format as Epoch Seconds",
                                dateValueAttribute));
                        throw new SerializationException(e);
                    }
                }
            } else {
                // no date value attribute configured, so use now
                dateValue = OffsetDateTime.now(ZoneId.of("UTC"));
            }

            // get the summed values
            if (this.aggregatorType.equals(AggregatorType.SUM)) {
                // get the positional sum items
                for (String s : summaryConfig.getItemSet()) {
                    try {
                        summary = StreamAggregatorUtils.readValueAsString(jsonContent, s);

                        // if a summary is not found in the data element,
                        // then we simply continue without it
                        if (summary != null)
                            sumUpdates.put(s, Double.parseDouble(summary));
                    } catch (NumberFormatException nfe) {
                        LOG.error(String.format(
                                "Unable to deserialise Summary '%s' due to NumberFormatException", s));
                        throw new SerializationException(nfe);
                    }
                }
            }

            // get local offset
            if (offsetAttribute != null) {
                try {
                    String offsetString = StreamAggregatorUtils.readValueAsString(jsonContent, offsetAttribute);
                    if (offsetString != null)
                        localOffset = Long.parseLong(offsetString);
                } catch (NumberFormatException nfe) {
                    LOG.error(String.format(
                            "Unable to deserialise local offset '%s' due to NumberFormatException",
                            dateValueAttribute));
                    throw new SerializationException(nfe);
                }
            }

            aggregateData.add(new AggregateData(uniqueId, labels, dateValue, sumUpdates, localOffset));
        }

        return aggregateData;
    } catch (Exception e) {
        throw new SerializationException(e);
    }
}
From source file:org.cryptomator.frontend.webdav.servlet.DavNode.java
private Optional<DavProperty<?>> lastModifiedDateProperty(DavPropertyName name) {
    return attr.map(BasicFileAttributes::lastModifiedTime) //
            .map(FileTime::toInstant) //
            .map(creationTime -> OffsetDateTime.ofInstant(creationTime, ZoneOffset.UTC)) //
            .map(creationDate -> new DefaultDavProperty<>(name,
                    DateTimeFormatter.RFC_1123_DATE_TIME.format(creationDate)));
}
From source file:org.cryptomator.frontend.webdav.servlet.DavNode.java
private Optional<DavProperty<?>> creationDateProperty(DavPropertyName name) {
    return attr.map(BasicFileAttributes::creationTime) //
            .map(FileTime::toInstant) //
            .map(creationTime -> OffsetDateTime.ofInstant(creationTime, ZoneOffset.UTC)) //
            .map(creationDate -> new DefaultDavProperty<>(name,
                    DateTimeFormatter.RFC_1123_DATE_TIME.format(creationDate)));
}
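Both WebDAV properties above follow the same FileTime -> Instant -> OffsetDateTime -> RFC 1123 chain. A standalone sketch of that chain, using a hypothetical file path instead of the project's attribute lookup:

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.FileTime;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

class Rfc1123Sketch {

    public static void main(String[] args) throws Exception {
        Path path = Paths.get("build.gradle"); // hypothetical file
        FileTime lastModified = Files.getLastModifiedTime(path);

        OffsetDateTime dateTime = OffsetDateTime.ofInstant(lastModified.toInstant(), ZoneOffset.UTC);

        // prints an HTTP-style date, e.g. "Tue, 3 Jun 2008 11:05:30 GMT"
        System.out.println(DateTimeFormatter.RFC_1123_DATE_TIME.format(dateTime));
    }
}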
From source file:am.ik.categolj3.api.git.GitStore.java
Author author(RevCommit commit) {
    String name = commit != null ? commit.getAuthorIdent().getName() : "";
    Date date = commit != null ? commit.getAuthorIdent().getWhen() : new Date();
    OffsetDateTime o = OffsetDateTime.ofInstant(date.toInstant(), ZoneId.systemDefault());
    return new Author(name, o);
}